wesnoth-cvs-commits
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

[Wesnoth-cvs-commits] wesnoth/src/serialization tokenizer.cpp tokeniz...


From: Guillaume Melquiond
Subject: [Wesnoth-cvs-commits] wesnoth/src/serialization tokenizer.cpp tokeniz...
Date: Sat, 07 May 2005 11:51:13 -0400

CVSROOT:        /cvsroot/wesnoth
Module name:    wesnoth
Branch:         
Changes by:     Guillaume Melquiond <address@hidden>    05/05/07 15:51:13

Modified files:
        src/serialization: tokenizer.cpp tokenizer.hpp 

Log message:
        Add support for inlined preprocessor directives. Use an illegal UTF-8
subchar (0xFE) as the start marker.

CVSWeb URLs:
http://savannah.gnu.org/cgi-bin/viewcvs/wesnoth/wesnoth/src/serialization/tokenizer.cpp.diff?tr1=1.9&tr2=1.10&r1=text&r2=text
http://savannah.gnu.org/cgi-bin/viewcvs/wesnoth/wesnoth/src/serialization/tokenizer.hpp.diff?tr1=1.3&tr2=1.4&r1=text&r2=text

Patches:
Index: wesnoth/src/serialization/tokenizer.cpp
diff -u wesnoth/src/serialization/tokenizer.cpp:1.9 
wesnoth/src/serialization/tokenizer.cpp:1.10
--- wesnoth/src/serialization/tokenizer.cpp:1.9 Wed Apr 27 22:17:37 2005
+++ wesnoth/src/serialization/tokenizer.cpp     Sat May  7 15:51:13 2005
@@ -1,4 +1,4 @@
-/* $Id: tokenizer.cpp,v 1.9 2005/04/27 22:17:37 silene Exp $ */
+/* $Id: tokenizer.cpp,v 1.10 2005/05/07 15:51:13 silene Exp $ */
 /*
    Copyright (C) 2004 by Philippe Plantier <address@hidden>
    Part of the Battle for Wesnoth Project http://www.wesnoth.org
@@ -30,35 +30,47 @@
        }
 }
 
+void tokenizer::skip_comment()
+{
+       // Dump comments up to \n
+       std::string comment;
+       next_char();
+       while (current_ != EOF && current_ != '\n') {
+               comment += current_;
+               next_char();
+       }
+
+       // Identifies and processes tokenizer directives
+       std::vector<std::string> comment_line = utils::split(comment, ' ');
+       if (comment_line.size() == 2 && comment_line[0] == "textdomain")
+               textdomain_ = comment_line[1];
+       else if (comment_line.size() > 3 && comment_line[0] == "line") {
+               lineno_ = atoi(comment_line[1].c_str());
+               comment_line.erase(comment_line.begin(), comment_line.begin() + 2);
+               file_ = ' ' + utils::join(comment_line, ' ');
+       }
+}
+
 const token& tokenizer::next_token()
 {
        token_.value = "";
        token_.leading_spaces = "";
 
-       // Dump spaces
-       while(is_space(current_)) {
-               token_.leading_spaces += current_;
+       // Dump spaces and inlined comments
+       for(;;) {
+               while (is_space(current_)) {
+                       token_.leading_spaces += current_;
+                       next_char();
+               }
+               if (current_ != 254)
+                       break;
+               skip_comment();
+               --lineno_;
                next_char();
        }
 
-       // Dump comments up to \n
-       if(current_ == '#') {
-               std::string comment;
-               do {
-                       comment += current_;
-                       next_char();
-               } while(current_ != EOF && current_ != '\n');
-
-               // Identifies and processes tokenizer directives
-               std::vector<std::string> comment_line = utils::split(comment, ' ');
-               if (comment_line.size() == 2 && comment_line[0] == "#textdomain")
-                       textdomain_ = comment_line[1];
-               else if (comment_line.size() > 3 && comment_line[0] == "#line") {
-                       lineno_ = atoi(comment_line[1].c_str());
-                       comment_line.erase(comment_line.begin(), comment_line.begin() + 2);
-                       file_ = ' ' + utils::join(comment_line, ' ');
-               }
-       } 
+       if (current_ == '#')
+               skip_comment();
 
        tokenstart_lineno_ = lineno_;
 
@@ -79,6 +91,10 @@
                                break;
                        if(current_ == '"' && peek_char() == '"')
                                next_char();
+                       if (current_ == 254) {
+                               skip_comment();
+                               continue;
+                       }
 
                        token_.value += current_;
                };
Index: wesnoth/src/serialization/tokenizer.hpp
diff -u wesnoth/src/serialization/tokenizer.hpp:1.3 
wesnoth/src/serialization/tokenizer.hpp:1.4
--- wesnoth/src/serialization/tokenizer.hpp:1.3 Wed Apr 27 22:17:37 2005
+++ wesnoth/src/serialization/tokenizer.hpp     Sat May  7 15:51:13 2005
@@ -1,4 +1,4 @@
-/* $Id: tokenizer.hpp,v 1.3 2005/04/27 22:17:37 silene Exp $ */
+/* $Id: tokenizer.hpp,v 1.4 2005/05/07 15:51:13 silene Exp $ */
 /*
    Copyright (C) 2004 by Philippe Plantier <address@hidden>
    Part of the Battle for Wesnoth Project http://www.wesnoth.org
@@ -57,6 +57,7 @@
        int peek_char();
        bool is_space(int c);
        bool is_alnum(int c);
+       void skip_comment();
 
        std::istream& in_;
        int current_;




reply via email to

[Prev in Thread] Current Thread [Next in Thread]