diff options
author | Jon Skeet <jonskeet@google.com> | 2015-11-23 16:21:47 +0000 |
---|---|---|
committer | Jon Skeet <jonskeet@google.com> | 2015-12-02 07:54:34 +0000 |
commit | 3de2fced6be1cc5e8f321c5aee2bb43176be962a (patch) | |
tree | 2942a336c3eb43c792520bbfd75006f2b2f4c6a6 /csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs | |
parent | 567579b50517e4f7efc459ab1d9d5ee2577af024 (diff) | |
download | protobuf-3de2fced6be1cc5e8f321c5aee2bb43176be962a.tar.gz protobuf-3de2fced6be1cc5e8f321c5aee2bb43176be962a.tar.bz2 protobuf-3de2fced6be1cc5e8f321c5aee2bb43176be962a.zip |
Handle JSON parsing for Any.
This required a rework of the tokenizer to allow for a "replaying" tokenizer, basically in case the @type value comes after the data itself. This rework is nice in some ways (all the pushback and object depth logic in one place) but is a little fragile in terms of token push-back when using the replay tokenizer. It'll be fine for the scenario we need it for, but we should be careful...
Diffstat (limited to 'csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs')
-rw-r--r-- | csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs | 14 |
1 file changed, 7 insertions, 7 deletions
diff --git a/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs b/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs index a38efeed..a0a62227 100644 --- a/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs +++ b/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs @@ -85,7 +85,7 @@ namespace Google.Protobuf public void ObjectDepth() { string json = "{ \"foo\": { \"x\": 1, \"y\": [ 0 ] } }"; - var tokenizer = new JsonTokenizer(new StringReader(json)); + var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json)); // If we had more tests like this, I'd introduce a helper method... but for one test, it's not worth it. Assert.AreEqual(0, tokenizer.ObjectDepth); Assert.AreEqual(JsonToken.StartObject, tokenizer.Next()); @@ -118,7 +118,7 @@ namespace Google.Protobuf public void ObjectDepth_WithPushBack() { string json = "{}"; - var tokenizer = new JsonTokenizer(new StringReader(json)); + var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json)); Assert.AreEqual(0, tokenizer.ObjectDepth); var token = tokenizer.Next(); Assert.AreEqual(1, tokenizer.ObjectDepth); @@ -275,7 +275,7 @@ namespace Google.Protobuf // Note: we don't test that the earlier tokens are exactly as expected, // partly because that's hard to parameterize. 
var reader = new StringReader(json.Replace('\'', '"')); - var tokenizer = new JsonTokenizer(reader); + var tokenizer = JsonTokenizer.FromTextReader(reader); for (int i = 0; i < expectedValidTokens; i++) { Assert.IsNotNull(tokenizer.Next()); @@ -334,7 +334,7 @@ namespace Google.Protobuf [Test] public void NextAfterEndDocumentThrows() { - var tokenizer = new JsonTokenizer(new StringReader("null")); + var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null")); Assert.AreEqual(JsonToken.Null, tokenizer.Next()); Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next()); Assert.Throws<InvalidOperationException>(() => tokenizer.Next()); @@ -343,7 +343,7 @@ namespace Google.Protobuf [Test] public void CanPushBackEndDocument() { - var tokenizer = new JsonTokenizer(new StringReader("null")); + var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null")); Assert.AreEqual(JsonToken.Null, tokenizer.Next()); Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next()); tokenizer.PushBack(JsonToken.EndDocument); @@ -373,7 +373,7 @@ namespace Google.Protobuf private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens) { var reader = new StringReader(json); - var tokenizer = new JsonTokenizer(reader); + var tokenizer = JsonTokenizer.FromTextReader(reader); for (int i = 0; i < expectedTokens.Length; i++) { var actualToken = tokenizer.Next(); @@ -393,7 +393,7 @@ namespace Google.Protobuf private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens) { var reader = new StringReader(json); - var tokenizer = new JsonTokenizer(reader); + var tokenizer = JsonTokenizer.FromTextReader(reader); for (int i = 0; i < expectedTokens.Length; i++) { var actualToken = tokenizer.Next(); |