Skip to content

Commit

Permalink
Fixed handling of upper-case characters (and a null next-character) in the tokenizer
Browse files Browse the repository at this point in the history
  • Loading branch information
thoughtsunificator committed Oct 18, 2021
1 parent e65a6aa commit 7aed920
Show file tree
Hide file tree
Showing 3 changed files with 58 additions and 1 deletion.
2 changes: 1 addition & 1 deletion src/tokenizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ class Tokenizer {
&& (!lastToken || Tokenizer.SEPARATOR_CHARACTERS.includes(lastToken.buffer.slice(-1)))) {
tokens.push(new Token(Token.TYPE.USER, buffer, index - buffer.length + 1))
buffer = ""
} else if(!Tokenizer.USER_ALLOWED_CHARACTERS.includes(nextCharacter) && !Tokenizer.CHANNEL_ALLOWED_CHARACTERS.includes(nextCharacter)) {
} else if(nextCharacter === null || (!Tokenizer.USER_ALLOWED_CHARACTERS.includes(nextCharacter.toLowerCase()) && !Tokenizer.CHANNEL_ALLOWED_CHARACTERS.includes(nextCharacter.toLowerCase()))) {
if(lastToken && lastToken.type === Token.TYPE.TEXT) {
let newBuffer = tokens[tokens.length - 1].buffer + buffer
let bufferIndex = index - buffer.length - tokens[tokens.length - 1].buffer.length + 1
Expand Down
27 changes: 27 additions & 0 deletions test/parser.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,33 @@ describe("parser", () => {
assert.deepEqual(message_____.channels, [])
assert.deepEqual(message_____.users, [])


const message______ = Parser.parse(`#fFSFSD #dsadsaDas #dFsads @dsaFGDSG`)

assert.strictEqual(message______.fragments[0].type, "CHANNEL")
assert.strictEqual(message______.fragments[0].content, "#fFSFSD")

assert.strictEqual(message______.fragments[1].type, "TEXT")
assert.strictEqual(message______.fragments[1].content, " ")

assert.strictEqual(message______.fragments[2].type, "CHANNEL")
assert.strictEqual(message______.fragments[2].content, "#dsadsaDas")

assert.strictEqual(message______.fragments[3].type, "TEXT")
assert.strictEqual(message______.fragments[3].content, " ")

assert.strictEqual(message______.fragments[4].type, "CHANNEL")
assert.strictEqual(message______.fragments[4].content, "#dFsads")

assert.strictEqual(message______.fragments[5].type, "TEXT")
assert.strictEqual(message______.fragments[5].content, " ")

assert.strictEqual(message______.fragments[6].type, "USER")
assert.strictEqual(message______.fragments[6].content, "@dsaFGDSG")

assert.deepEqual(message______.channels, ["fFSFSD", "dsadsaDas", "dFsads"])
assert.deepEqual(message______.users, ["dsaFGDSG"])

})

})
30 changes: 30 additions & 0 deletions test/tokenizer.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,36 @@ describe("tokenize", () => {
assert.strictEqual(tokens_____[0].buffer, "@dsadsa#dsads")
assert.strictEqual(tokens_____[0].bufferIndex, 0)

const tokens______ = Tokenizer.tokenize(`#fFSFSD #dsadsaDas #dFsads @dsaFGDSG`)

assert.strictEqual(tokens______[0].type, "CHANNEL")
assert.strictEqual(tokens______[0].buffer, "#fFSFSD")
assert.strictEqual(tokens______[0].bufferIndex, 0)

assert.strictEqual(tokens______[1].type, "TEXT")
assert.strictEqual(tokens______[1].buffer, " ")
assert.strictEqual(tokens______[1].bufferIndex, 7)

assert.strictEqual(tokens______[2].type, "CHANNEL")
assert.strictEqual(tokens______[2].buffer, "#dsadsaDas")
assert.strictEqual(tokens______[2].bufferIndex, 8)

assert.strictEqual(tokens______[3].type, "TEXT")
assert.strictEqual(tokens______[3].buffer, " ")
assert.strictEqual(tokens______[3].bufferIndex, 18)

assert.strictEqual(tokens______[4].type, "CHANNEL")
assert.strictEqual(tokens______[4].buffer, "#dFsads")
assert.strictEqual(tokens______[4].bufferIndex, 19)

assert.strictEqual(tokens______[5].type, "TEXT")
assert.strictEqual(tokens______[5].buffer, " ")
assert.strictEqual(tokens______[5].bufferIndex, 26)

assert.strictEqual(tokens______[6].type, "USER")
assert.strictEqual(tokens______[6].buffer, "@dsaFGDSG")
assert.strictEqual(tokens______[6].bufferIndex, 27)

})

})

0 comments on commit 7aed920

Please sign in to comment.