int ary_includes(ary_t *ary, int val);
// returns the number of times the value appears in the collection
-// refactored from _Wikitext_count()
int ary_count(ary_t *ary, int item);
void ary_free(ary_t *ary);
char *pe = p + len;
token_t token;
next_token(&token, NULL, p, pe);
- rb_ary_push(tokens, _Wikitext_token(&token));
+ rb_ary_push(tokens, wiki_token(&token));
while (token.type != END_OF_FILE)
{
next_token(&token, &token, NULL, pe);
- rb_ary_push(tokens, _Wikitext_token(&token));
+ rb_ary_push(tokens, wiki_token(&token));
}
return tokens;
}
}
// for testing and debugging only
-VALUE _Wikitext_token(token_t *token)
+VALUE wiki_token(token_t *token)
{
VALUE object = rb_class_new_instance(0, NULL, cWikitextParserToken);
(void)rb_iv_set(object, "@start", LONG2NUM((long)token->start));
VALUE Wikitext_parser_token_types(VALUE self);
-VALUE _Wikitext_token(token_t *token);
+VALUE wiki_token(token_t *token);
Wikitext::Parser.sanitize_link_target('foo, "bar" & baz €').should == 'foo, "bar" & baz €'
end
- # here we're exercising the _Wikitext_utf8_to_utf32 function
+ # here we're exercising the wiki_utf8_to_utf32 function
describe 'with invalidly encoded input' do
it 'should raise an exception for missing second byte' do
lambda {
# this is the general case of the bug covered in the previous spec
# any token that appears as the first token after a PRE token can manifest this bug
- # PRINTABLE didn't only because it called _Wikitext_start_para_if_necessary(), which handled the pending CRLF
+ # PRINTABLE didn't only because it called wiki_start_para_if_necessary(), which handled the pending CRLF
it 'should emit pending newlines for all token types found inside PRE and PRE_START blocks' do
# PRE_START
input = dedent <<-END
END
@parser.parse(input).should == expected
- # these tokens weren't affected by the bug, seeing as they either call _Wikitext_start_para_if_necessary()
+ # these tokens weren't affected by the bug, seeing as they either call wiki_start_para_if_necessary()
# or they can only appear in PRE_START (not PRE) thanks to the tokenizer
# but we add specs for them to make sure that the issue never crops up for them in the future