Skip to content

Commit

Permalink
Merge pull request #288 from X0RW3LL/master
Browse files Browse the repository at this point in the history
Fix Python 3.12 syntax warnings
  • Loading branch information
saeeddhqan authored Aug 26, 2024
2 parents 0440c02 + 1bfdcbe commit e40c509
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion maryam/core/util/helpers/web_scrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def link_category(self, urls):
if cond1:
continue

join = str(join).replace('\/', '/')
join = str(join).replace(r'\/', '/')
##########################
# ADD OUT SCOPE
##########################
Expand Down
2 changes: 1 addition & 1 deletion maryam/core/util/iris/cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def remove_stopwords(self, text):
return [x for x in text if x not in stops]

def tokenize_and_stem(self, text):
tokens = re.findall("[A-Z]{2,}(?![a-z])|[A-Z][a-z]+(?=[A-Z])|[\'\w\-]+",text)
tokens = re.findall(r"[A-Z]{2,}(?![a-z])|[A-Z][a-z]+(?=[A-Z])|[\'\w\-]+",text)
filtered_tokens = []
for token in tokens:
if re.search('[a-zA-Z]', token):
Expand Down
2 changes: 1 addition & 1 deletion maryam/modules/footprint/crawl_pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
('debug', False, False, 'debug the scraper', '--debug', 'store_true', bool),
('thread', 1, False, 'The number of links that open per round', '-t', 'store', int),
),
'examples': ('crawl_pages -d <DOMAIN> -r "https?://[A-z0-9\./]+"\
'examples': (r'crawl_pages -d <DOMAIN> -r "https?://[A-z0-9\./]+"\
--output', 'crawl_pages -d <DOMAIN> --limit 2 --more')
}

Expand Down
2 changes: 1 addition & 1 deletion maryam/modules/osint/cloud_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def module_api(self):
self.thread(search, self.options['thread'], engine, query, q_formats, limit, count, meta['sources'])

output['links'] += list( self.reglib().filter(r"https?://([\w\-\.]+\.)?"\
+ site_url.replace('.','\.')+"/", list( set(LINKS) ) ) ) #escaping . for regex search using replace()
+ site_url.replace('.',r'\.')+"/", list( set(LINKS) ) ) ) #escaping . for regex search using replace()

self.save_gather(output, 'osint/cloud_storage', query, output=self.options.get('output'))
return output
Expand Down

0 comments on commit e40c509

Please sign in to comment.