Gitweb links:
...log
http://git.netsurf-browser.org/libhubbub.git/shortlog/96033c6b5e97a9fd51f32115d873c38d37f55a71
...commit
http://git.netsurf-browser.org/libhubbub.git/commit/96033c6b5e97a9fd51f32115d873c38d37f55a71
...tree
http://git.netsurf-browser.org/libhubbub.git/tree/96033c6b5e97a9fd51f32115d873c38d37f55a71
The branch, rupindersingh/libhubbub has been updated
via 96033c6b5e97a9fd51f32115d873c38d37f55a71 (commit)
from f980cf4374c2f750525baba57eb96bfabbe8b935 (commit)
Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.
- Log -----------------------------------------------------------------
commitdiff
http://git.netsurf-browser.org/libhubbub.git/commit/?id=96033c6b5e97a9fd51f32115d873c38d37f55a71
commit 96033c6b5e97a9fd51f32115d873c38d37f55a71
Author: Rupinder Singh Khokhar <rsk1coder99(a)gmail.com>
Commit: Rupinder Singh Khokhar <rsk1coder99(a)gmail.com>
Adding the COMMENT_END_BANG state for test3.dat
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index 86c5f31..e86b991 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -78,6 +78,7 @@ typedef enum hubbub_tokeniser_state {
STATE_COMMENT,
STATE_COMMENT_END_DASH,
STATE_COMMENT_END,
+ STATE_COMMENT_END_BANG,
STATE_MATCH_DOCTYPE,
STATE_DOCTYPE,
STATE_BEFORE_DOCTYPE_NAME,
@@ -539,6 +540,7 @@ hubbub_error hubbub_tokeniser_run(hubbub_tokeniser *tokeniser)
case STATE_COMMENT_START:
case STATE_COMMENT_START_DASH:
case STATE_COMMENT:
+ case STATE_COMMENT_END_BANG:
case STATE_COMMENT_END_DASH:
case STATE_COMMENT_END:
cont = hubbub_tokeniser_handle_comment(tokeniser);
@@ -1894,7 +1896,8 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser
*tokeniser)
if (c == '>' && (tokeniser->state == STATE_COMMENT_START_DASH ||
tokeniser->state == STATE_COMMENT_START ||
- tokeniser->state == STATE_COMMENT_END)) {
+ tokeniser->state == STATE_COMMENT_END ||
+ tokeniser->state == STATE_COMMENT_END_BANG)) {
tokeniser->context.pending += len;
/** \todo parse error if state != COMMENT_END */
@@ -1916,9 +1919,21 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser
*tokeniser)
return hubbub_error_from_parserutils_error(
error);
}
+ } else if (tokeniser->state == STATE_COMMENT_END_BANG) {
+ error = parserutils_buffer_append(tokeniser->buffer,
+ (uint8_t *) "--!", SLEN("--!"));
+ if (error != PARSERUTILS_OK) {
+ return hubbub_error_from_parserutils_error(
+ error);
+ }
+ tokeniser->state = STATE_COMMENT_END_DASH;
}
tokeniser->context.pending += len;
+ } else if (c == '!' && tokeniser->state == STATE_COMMENT_END) {
+ tokeniser->state = STATE_COMMENT_END_BANG;
+ tokeniser->context.pending += len;
+ return HUBBUB_OK;
} else {
if (tokeniser->state == STATE_COMMENT_START_DASH ||
tokeniser->state == STATE_COMMENT_END_DASH) {
@@ -1935,8 +1950,16 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser
*tokeniser)
return hubbub_error_from_parserutils_error(
error);
}
+ } else if (tokeniser->state == STATE_COMMENT_END_BANG) {
+ error = parserutils_buffer_append(tokeniser->buffer,
+ (uint8_t *) "--!", SLEN("--!"));
+ if (error != PARSERUTILS_OK) {
+ return hubbub_error_from_parserutils_error(
+ error);
+ }
}
+
if (c == '\0') {
error = parserutils_buffer_append(tokeniser->buffer,
u_fffd, sizeof(u_fffd));
diff --git a/test/data/tokeniser2/INDEX b/test/data/tokeniser2/INDEX
index 1f6ea93..96daa2a 100644
--- a/test/data/tokeniser2/INDEX
+++ b/test/data/tokeniser2/INDEX
@@ -4,7 +4,7 @@
test1.test html5lib tests (part 1)
test2.test html5lib tests (part 2)
-#test3.test html5lib tests (part 3)
+test3.test html5lib tests (part 3)
#test4.test html5lib tests (part 4)
#contentModelFlags.test html5lib content model tests
#entities.test html5lib entity tests
-----------------------------------------------------------------------
Summary of changes:
src/tokeniser/tokeniser.c | 25 ++++++++++++++++++++++++-
test/data/tokeniser2/INDEX | 2 +-
2 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index 86c5f31..e86b991 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -78,6 +78,7 @@ typedef enum hubbub_tokeniser_state {
STATE_COMMENT,
STATE_COMMENT_END_DASH,
STATE_COMMENT_END,
+ STATE_COMMENT_END_BANG,
STATE_MATCH_DOCTYPE,
STATE_DOCTYPE,
STATE_BEFORE_DOCTYPE_NAME,
@@ -539,6 +540,7 @@ hubbub_error hubbub_tokeniser_run(hubbub_tokeniser *tokeniser)
case STATE_COMMENT_START:
case STATE_COMMENT_START_DASH:
case STATE_COMMENT:
+ case STATE_COMMENT_END_BANG:
case STATE_COMMENT_END_DASH:
case STATE_COMMENT_END:
cont = hubbub_tokeniser_handle_comment(tokeniser);
@@ -1894,7 +1896,8 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser
*tokeniser)
if (c == '>' && (tokeniser->state == STATE_COMMENT_START_DASH ||
tokeniser->state == STATE_COMMENT_START ||
- tokeniser->state == STATE_COMMENT_END)) {
+ tokeniser->state == STATE_COMMENT_END ||
+ tokeniser->state == STATE_COMMENT_END_BANG)) {
tokeniser->context.pending += len;
/** \todo parse error if state != COMMENT_END */
@@ -1916,9 +1919,21 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser
*tokeniser)
return hubbub_error_from_parserutils_error(
error);
}
+ } else if (tokeniser->state == STATE_COMMENT_END_BANG) {
+ error = parserutils_buffer_append(tokeniser->buffer,
+ (uint8_t *) "--!", SLEN("--!"));
+ if (error != PARSERUTILS_OK) {
+ return hubbub_error_from_parserutils_error(
+ error);
+ }
+ tokeniser->state = STATE_COMMENT_END_DASH;
}
tokeniser->context.pending += len;
+ } else if (c == '!' && tokeniser->state == STATE_COMMENT_END) {
+ tokeniser->state = STATE_COMMENT_END_BANG;
+ tokeniser->context.pending += len;
+ return HUBBUB_OK;
} else {
if (tokeniser->state == STATE_COMMENT_START_DASH ||
tokeniser->state == STATE_COMMENT_END_DASH) {
@@ -1935,8 +1950,16 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser
*tokeniser)
return hubbub_error_from_parserutils_error(
error);
}
+ } else if (tokeniser->state == STATE_COMMENT_END_BANG) {
+ error = parserutils_buffer_append(tokeniser->buffer,
+ (uint8_t *) "--!", SLEN("--!"));
+ if (error != PARSERUTILS_OK) {
+ return hubbub_error_from_parserutils_error(
+ error);
+ }
}
+
if (c == '\0') {
error = parserutils_buffer_append(tokeniser->buffer,
u_fffd, sizeof(u_fffd));
diff --git a/test/data/tokeniser2/INDEX b/test/data/tokeniser2/INDEX
index 1f6ea93..96daa2a 100644
--- a/test/data/tokeniser2/INDEX
+++ b/test/data/tokeniser2/INDEX
@@ -4,7 +4,7 @@
test1.test html5lib tests (part 1)
test2.test html5lib tests (part 2)
-#test3.test html5lib tests (part 3)
+test3.test html5lib tests (part 3)
#test4.test html5lib tests (part 4)
#contentModelFlags.test html5lib content model tests
#entities.test html5lib entity tests
--
HTML5 parser library