Skip to content

Commit

Permalink
Colour vocab mapping cnn added
Browse files Browse the repository at this point in the history
  • Loading branch information
Dhvani Patel committed Jul 25, 2017
1 parent 44cf63d commit cec3fcb
Show file tree
Hide file tree
Showing 4 changed files with 61 additions and 23 deletions.
Binary file modified __pycache__/toCheck.pypy-41.pyc
Binary file not shown.
80 changes: 58 additions & 22 deletions cnn_hub.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,23 @@
import numpy as np
from PIL import Image

global all_tokens
# 87 VOCAB

def set_col_from_json(all_tokens):
    """Map each token to its RGB colour via the colour vocabulary file.

    Reads ``vocabulary_color.json`` from the current directory, which must
    contain two parallel lists: ``"indexes"`` (token values, e.g. ``"+"``,
    ``"<IDENTIFIER>"``) and ``"colours"`` (``[r, g, b]`` triples).

    Parameters
    ----------
    all_tokens : iterable
        Token objects exposing a ``.value`` attribute (as produced by
        ``handle_token``), already abstracted to vocabulary entries.

    Returns
    -------
    list
        One ``[r, g, b]`` colour (taken verbatim from the JSON) per input
        token, in input order.

    Raises
    ------
    ValueError
        If a token's value is not present in the vocabulary (same exception
        type that ``list.index`` raised in the original implementation).
    """
    with open('vocabulary_color.json', encoding='utf-8') as data_file:
        data = json.load(data_file)
    # Build the value -> colour mapping once: the original called
    # list.index() for every token (O(vocab) per token) and then did a
    # second pass to translate indexes into colours.
    colour_of = dict(zip(data["indexes"], data["colours"]))
    try:
        return [colour_of[tok.value] for tok in all_tokens]
    except KeyError as err:
        # Preserve the original failure mode (ValueError from list.index)
        # but with a clearer message.
        raise ValueError("token value not in vocabulary: %s" % err) from None

def open_closed_tokens(token):
"""
Expand Down Expand Up @@ -49,6 +64,7 @@ def open_closed_tokens(token):
elif token.type in VERBATIM_CLASSES:
# These tokens should be mapped verbatim to their names.
assert ' ' not in token.value
#print (token.value)
return token.value
elif token.type in {'NUMBER', 'STRING'}:
# These tokens should be abstracted.
Expand Down Expand Up @@ -137,6 +153,7 @@ def handle_token(all_tokens):
val = repr(token)[2:len(repr(token))-1]
else:
val = repr(token)[1:len(repr(token))-1]
#print (val)
send = Token(tokenize.tok_name[type], val, srow, scol, erow, ecol, line)
allReturn.append(send)
print ("%d,%d-%d,%d:\t%s\t%s" % \
Expand All @@ -160,48 +177,67 @@ def create(numFile):
#print all_rows[numFile][0]
all_tokens = []
text = (all_rows[numFile][0]).decode('utf-8')
print (type(text))
#print (type(text))
tokenStream = tokenize.generate_tokens(StringIO(text).readline)
print (tokenize.tok_name)
#print (tokenize.tok_name)
for tok in tokenStream:
all_tokens.append([tok.exact_type, tokenize.tok_name[tok.exact_type], tok[2], tok[3], tok[4]])
all_tokens.append([tok.exact_type, tok[1], tok[2], tok[3], tok[4]])
#print (tok)
allGood = handle_token(all_tokens[:])
#print (allGood[21].type)
gotWhat = vocabularize_tokens(allGood, False)
lines = []
maxCol = -1
for tok in allGood:
print (tok.value)
#print (tok.value)
lines.append(tok.srow)
maxComp = tok.ecol
if maxComp > maxCol:
maxCol = maxComp
all_text = (all_rows[numFile][0]).decode()
#print gotWhat[0].value
print (len(all_tokens))
print (len(gotWhat))
print (lines)
#print (len(all_tokens))
#print (len(gotWhat))
#print (lines)
num_lines = len(set(lines))
print (num_lines)
print (maxCol)
#print (num_lines)
#print (maxCol)

imageArrOne = [-1] * maxCol
a = []
for _ in range(num_lines):
a.append(imageArrOne)


#a = np.column_stack((imageArrOne, imageArrTwo))
#print (a.shape)
#imshow([[0], [1], [0]], cmap=get_cmap("Spectral"), interpolation='none')
#show()
cols = set_col_from_json(gotWhat)
#print (cols)
im = Image.new("RGB", (maxCol, num_lines))
pix = im.load()
for x in range(maxCol):
for y in range(num_lines):
pix[x,y] = (255,255,255)
pix[5,6] = (255,0,0)
at = lines[0]
colsComb = []
allCols = []
iterInd = 0
for ind in lines:
if ind == at:
colsComb.append(cols[iterInd])
if iterInd == len(lines)-1:
#print ("HERE")
allCols.append(colsComb)
else:
at = ind
allCols.append(colsComb)
#print (allCols)
#print ("first")
colsComb = []
colsComb.append(cols[iterInd])
#print (iterInd)
#print (ind)
#print (len(lines))
if iterInd == len(lines)-1:
#print ("HERE")
allCols.append(colsComb)
iterInd += 1
print (num_lines)
print (len(allCols))
#print (allCols)
#pix[5,6] = (255,0,0)
im.save("test.png", "PNG")

#print all_text
Expand All @@ -214,6 +250,6 @@ def create(numFile):


if __name__ == '__main__':
create(4)
create(2)
#for x in range(10):
#create(x)
Binary file modified test.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
4 changes: 3 additions & 1 deletion vocabulary_color.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
{"indexes": ["!=", "%", "%=", "&", "&=", "(", ")", "*", "**", "**=", "*=", "+", "+=", ",", "-", "-=", "->", ".", "...", "/", "//", "//=", "/=", ":", ";", "<", "<<", "<<=", "<=", "<IDENTIFIER>", "<NUMBER>", "<STRING>", "=", "==", ">", ">=", ">>", ">>=", "@", "@=", "DEDENT", "False", "INDENT", "NEWLINE", "None", "True", "[", "]", "^", "^=", "and", "as", "assert", "async", "await", "break", "class", "continue", "def", "del", "elif", "else", "except", "finally", "for", "from", "global", "if", "import", "in", "is", "lambda", "nonlocal", "not", "or", "pass", "raise", "return", "try", "while", "with", "yield", "{", "|", "|=", "}", "~"]
{"indexes": ["!=", "%", "%=", "&", "&=", "(", ")", "*", "**", "**=", "*=", "+", "+=", ",", "-", "-=", "->", ".", "...", "/", "//", "//=", "/=", ":", ";", "<", "<<", "<<=", "<=", "<IDENTIFIER>", "<NUMBER>", "<STRING>", "=", "==", ">", ">=", ">>", ">>=", "@", "@=", "DEDENT", "False", "INDENT", "NEWLINE", "None", "True", "[", "]", "^", "^=", "and", "as", "assert", "async", "await", "break", "class", "continue", "def", "del", "elif", "else", "except", "finally", "for", "from", "global", "if", "import", "in", "is", "lambda", "nonlocal", "not", "or", "pass", "raise", "return", "try", "while", "with", "yield", "{", "|", "|=", "}", "~"],

"colours": [[255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255], [255,255,255]]
}

0 comments on commit cec3fcb

Please sign in to comment.