mirror of https://github.com/CIRCL/AIL-framework

Merge branch 'master' into typo
commit 9d78721f7a by David Cruciani
@@ -209,8 +209,6 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Decoder" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Decoder.py; read x"
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./DomClassifier.py; read x"
-    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Keys" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Keys.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Onion" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Onion.py; read x"
@@ -220,6 +218,11 @@ function launching_scripts {
     screen -S "Script_AIL" -X screen -t "Telegram" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Telegram.py; read x"
     sleep 0.1
+
+    screen -S "Script_AIL" -X screen -t "Hosts" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Hosts.py; read x"
+    sleep 0.1
+    screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./DomClassifier.py; read x"
+    sleep 0.1

     screen -S "Script_AIL" -X screen -t "Urls" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Urls.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "SQLInjectionDetection" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./SQLInjectionDetection.py; read x"

@@ -48,8 +48,6 @@ class Sync_module(AbstractModule):

     def compute(self, message):

-        print(message)
-
         ### REFRESH DICT
         if self.last_refresh < ail_2_ail.get_last_updated_sync_config():
             self.last_refresh = time.time()
@@ -71,17 +69,16 @@ class Sync_module(AbstractModule):
                 tags = obj.get_tags(r_set=True)

             # check filter + tags
+            #print(message)
             for queue_uuid in self.dict_sync_queues:
                 filter_tags = self.dict_sync_queues[queue_uuid]['filter']
-                print(tags)
-                print(filter_tags)
-                print(tags.issubset(filter_tags))
                 if filter_tags and tags:
-                    if tags.issubset(filter_tags):
+                    #print(f'tags: {tags} filter: {filter_tags}')
+                    if filter_tags.issubset(tags):
                         obj_dict = obj.get_default_meta()
                         # send to queue push and/or pull
                         for dict_ail in self.dict_sync_queues[queue_uuid]['ail_instances']:

                             print(f'ail_uuid: {dict_ail["ail_uuid"]} obj: {message}')
                             ail_2_ail.add_object_to_sync_queue(queue_uuid, dict_ail['ail_uuid'], obj_dict,
                                                             push=dict_ail['push'], pull=dict_ail['pull'])
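
Note: the fixed condition reverses the subset test. A sync queue now matches when every tag in its filter is present on the object, instead of requiring the object's tags to be a subset of the filter, which skipped any object carrying extra tags. A minimal sketch of the corrected semantics, with hypothetical tag values:

    # Hypothetical tags illustrating the corrected issubset() direction.
    obj_tags = {'infoleak:automatic-detection="credential"', 'infoleak:submission="manual"'}
    filter_tags = {'infoleak:automatic-detection="credential"'}

    print(obj_tags.issubset(filter_tags))   # False: the old check missed this object
    print(filter_tags.issubset(obj_tags))   # True: all filter tags are on the object
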
@@ -1028,8 +1028,9 @@ def api_create_sync_queue(json_dict):
     tags = json_dict.get('tags')
     if not tags:
         return {"status": "error", "reason": "no tags provided"}, 400
-    if not Tag.are_enabled_tags(tags):
-        return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
+    # FIXME: add custom tags
+    # if not Tag.are_enabled_tags(tags):
+    #     return {"status": "error", "reason": "Invalid/Disabled tags"}, 400

     max_size = json_dict.get('max_size')
     if not max_size:
@@ -1064,8 +1065,9 @@ def api_edit_sync_queue(json_dict):

     tags = json_dict.get('tags')
     if tags:
-        if not Tag.are_enabled_tags(tags):
-            return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
+        # FIXME: add custom tags
+        # if not Tag.are_enabled_tags(tags):
+        #     return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
         edit_sync_queue_filter_tags(queue_uuid, tags)

     max_size = json_dict.get('max_size')

@@ -20,6 +20,16 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from core import ail_2_ail
+from lib.ConfigLoader import ConfigLoader
+
+config_loader = ConfigLoader()
+local_addr = config_loader.get_config_str('AIL_2_AIL', 'local_addr')
+if not local_addr or local_addr == None:
+    local_addr = None
+else:
+    local_addr = (local_addr, 0)
+config_loader = None
+

 #### LOGS ####
 redis_logger = publisher
@@ -68,10 +78,9 @@ async def push(websocket, ail_uuid):
             Obj, queue_uuid = ail_2_ail.get_sync_queue_object_and_queue_uuid(ail_uuid)
             if Obj:
                 obj_ail_stream = ail_2_ail.create_ail_stream(Obj)
+                print(obj_ail_stream['meta'])
                 obj_ail_stream = json.dumps(obj_ail_stream)

-                sys.stdout.write(obj_ail_stream)
-
                 # send objects
                 await websocket.send(obj_ail_stream)
                 await asyncio.sleep(0.1)
@@ -112,6 +121,7 @@ async def ail_to_ail_client(ail_uuid, sync_mode, api, ail_key=None, client_id=No
         async with websockets.connect(
             uri,
             ssl=ssl_context,
+            local_addr=local_addr,
             #open_timeout=10, websockers 10.0 /!\ python>=3.7
             extra_headers={"Authorization": f"{ail_key}"}
         ) as websocket:
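
Note: local_addr lets the sync client pin its outgoing source address; websockets passes the tuple through to asyncio's create_connection(). The (address, 0) form means "bind this source address, let the OS pick an ephemeral port". A standalone sketch of that convention, assuming a loopback address:

    import socket

    # (address, 0): fixed source address, ephemeral source port --
    # the same convention as local_addr = (local_addr, 0) above.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind(('127.0.0.1', 0))
    print(s.getsockname())  # ('127.0.0.1', <ephemeral port>)
    s.close()
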
@@ -17,6 +17,12 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 from pubsublogger import publisher
 from core import ail_2_ail
+from lib.ConfigLoader import ConfigLoader
+
+config_loader = ConfigLoader()
+host = config_loader.get_config_str('AIL_2_AIL', 'server_host')
+port = config_loader.get_config_int('AIL_2_AIL', 'server_port')
+config_loader = None

 # # TODO: refactor logging
 #### LOGS ####
@@ -303,9 +309,6 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):

 if __name__ == '__main__':

-    host = '0.0.0.0'
-    port = 4443
-
     print('Launching Server...')
     redis_logger.info('Launching Server...')

@@ -315,7 +318,7 @@ if __name__ == '__main__':
     cert_dir = os.environ['AIL_FLASK']
     ssl_context.load_cert_chain(certfile=os.path.join(cert_dir, 'server.crt'), keyfile=os.path.join(cert_dir, 'server.key'))

-    start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol)
+    start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol, max_size=None)

     print(f'Server Launched:    wss://{host}:{port}')
     redis_logger.info(f'Server Launched:    wss://{host}:{port}')
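
Note: max_size=None lifts the websockets library's default 1 MiB cap on inbound messages, so large serialized AIL objects are not rejected mid-sync. A minimal sketch of a server with the cap removed (hypothetical host and port, same era of the websockets API as the code above):

    import asyncio
    import websockets

    async def handler(websocket, path):
        async for message in websocket:
            # with max_size=None, messages larger than 1 MiB arrive intact
            print(f'received {len(message)} bytes')

    start_server = websockets.serve(handler, '127.0.0.1', 8765, max_size=None)
    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()
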
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+The JSON Receiver Module
+================
+
+Receive JSON items (example: Telegram feeder)
+
+"""
+import os
+import json
+import sys
+import datetime
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
+import item_basic
+import Username
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'import', 'ail_json_importer'))
+from Default_json import Default_json
+
+class Ail_feeder_telegram(Default_json):
+    """Telegram feeder functions"""
+
+    def __init__(self, name, json_item):
+        super().__init__(name, json_item)
+
+    def get_feeder_name(self):
+        return 'telegram'
+
+    # define item id
+    def get_item_id(self):
+        # use telegram timestamp ?
+        item_date = datetime.date.today().strftime("%Y/%m/%d")
+        channel_id = str(self.json_item['meta']['channel_id'])
+        message_id = str(self.json_item['meta']['message_id'])
+        item_id = f'{channel_id}_{message_id}'
+        return os.path.join('telegram', item_date, item_id) + '.gz'
+
+    def process_json_meta(self, process, item_id):
+        '''
+        Process JSON meta field.
+        '''
+        channel_id = str(self.json_item['meta']['channel_id'])
+        message_id = str(self.json_item['meta']['message_id'])
+        telegram_id = f'{channel_id}_{message_id}'
+        item_basic.add_map_obj_id_item_id(telegram_id, item_id, 'telegram_id')
+        #print(self.json_item['meta'])
+        username = None
+        if self.json_item['meta'].get('user'):
+            username = str(self.json_item['meta']['user'])
+        else:
+            if self.json_item['meta'].get('channel'):
+                username = str(self.json_item['meta']['channel']['username'])
+        if username:
+            #print(username)
+            item_date = item_basic.get_item_date(item_id)
+            Username.save_item_correlation('telegram', username, item_id, item_date)
+        return None
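
Note: get_item_id() buckets each Telegram message under a date directory, keyed by channel and message id. A standalone sketch with hypothetical ids:

    import os
    import datetime

    # Hypothetical channel/message ids mirroring get_item_id() above.
    channel_id, message_id = '1234567890', '42'
    item_date = datetime.date.today().strftime('%Y/%m/%d')
    item_id = os.path.join('telegram', item_date, f'{channel_id}_{message_id}') + '.gz'
    print(item_id)  # e.g. telegram/2021/11/30/1234567890_42.gz
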
@@ -116,6 +116,9 @@ def is_domain_root(item_id):
             else:
                 return True

+def get_item_url(item_id):
+    return r_serv_metadata.hget(f'paste_metadata:{item_id}', 'real_link')
+
 def get_nb_children(item_id):
     return r_serv_metadata.scard('paste_children:{}'.format(item_id))

@@ -166,7 +169,7 @@ def add_item_parent(parent_item_id, item_id):
 #### UNKNOW SECTION ####

 def get_obj_id_item_id(parent_type, parent_id):
-    all_parents_type = ['twitter_id', 'jabber_id']
+    all_parents_type = ['twitter_id', 'jabber_id', 'telegram_id']
     if parent_type in all_parents_type:
         return r_serv_metadata.hget('map:{}:item_id'.format(parent_type), parent_id)
     else:
@@ -177,6 +180,8 @@ def add_map_obj_id_item_id(obj_id, item_id, obj_type):
         r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id)
     if obj_type == 'jabber_id':
         r_serv_metadata.hset('map:jabber_id:item_id', obj_id, item_id)
+    if obj_type == 'telegram_id':
+        r_serv_metadata.hset('map:telegram_id:item_id', obj_id, item_id)

 # delete twitter id

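
Note: the new mapping mirrors the existing twitter_id/jabber_id hashes: one Redis hash per parent type, keyed by the external object id. A sketch of the round trip, assuming a local Redis and hypothetical ids:

    import redis

    r = redis.Redis(host='localhost', port=6379, decode_responses=True)
    # write: external telegram id -> AIL item id (as add_map_obj_id_item_id does)
    r.hset('map:telegram_id:item_id', '1234567890_42', 'telegram/2021/11/30/1234567890_42.gz')
    # read back: the lookup path used by get_obj_id_item_id
    print(r.hget('map:telegram_id:item_id', '1234567890_42'))
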
@@ -53,37 +53,38 @@ class DomClassifier(AbstractModule):


     def compute(self, message, r_result=False):
-        item = Item(message)
+        host, id = message.split()

-        item_content = item.get_content()
+        item = Item(id)
         item_basename = item.get_basename()
         item_date = item.get_date()
         item_source = item.get_source()
         try:
-            mimetype = item_basic.get_item_mimetype(item.get_id())
-
-            if mimetype.split('/')[0] == "text":
-                self.c.text(rawtext=item_content)
-                self.c.potentialdomain()
-                self.c.validdomain(passive_dns=True, extended=False)
-                #self.redis_logger.debug(self.c.vdomain)
+            self.c.text(rawtext=host)
+            print(self.c.domain)
+            self.c.validdomain(passive_dns=True, extended=False)
+            #self.redis_logger.debug(self.c.vdomain)

-                if self.c.vdomain and d4.is_passive_dns_enabled():
-                    for dns_record in self.c.vdomain:
-                        self.send_message_to_queue(dns_record)
+            print(self.c.vdomain)
+            print()

-                localizeddomains = self.c.include(expression=self.cc_tld)
-                if localizeddomains:
-                    print(localizeddomains)
-                    self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc_tld};{item.get_id()}")
+            if self.c.vdomain and d4.is_passive_dns_enabled():
+                for dns_record in self.c.vdomain:
+                    self.send_message_to_queue(dns_record)

-                localizeddomains = self.c.localizedomain(cc=self.cc)
-                if localizeddomains:
-                    print(localizeddomains)
-                    self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc};{item.get_id()}")
+            localizeddomains = self.c.include(expression=self.cc_tld)
+            if localizeddomains:
+                print(localizeddomains)
+                self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc_tld};{item.get_id()}")

-                if r_result:
-                    return self.c.vdomain
+            localizeddomains = self.c.localizedomain(cc=self.cc)
+            if localizeddomains:
+                print(localizeddomains)
+                self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc};{item.get_id()}")
+
+            if r_result:
+                return self.c.vdomain

         except IOError as err:
             self.redis_logger.error(f"Duplicate;{item_source};{item_date};{item_basename};CRC Checksum Failed")
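
Note: compute() no longer receives a bare item id. The Hosts module now feeds it '<host> <item_id>' messages, and the extracted host, not the full item content, goes to the DNS checks. A sketch of the new message contract, using the test fixture values:

    # Message format produced by Hosts and consumed by DomClassifier.
    message = 'foo.be tests/2021/01/01/domain_classifier.gz'
    host, item_id = message.split()
    print(host)     # foo.be      -> passed to self.c.text(rawtext=host)
    print(item_id)  # tests/...   -> used only for logging metadata
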
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+"""
+The Hosts Module
+======================
+
+This module consumes the Redis list created by the Global module.
+
+It is looking for hostnames.
+
+"""
+
+##################
+# Import External packages
+##################
+import os
+import re
+import sys
+import time
+
+sys.path.append(os.environ['AIL_BIN'])
+##################
+# Import Project packages
+##################
+from modules.abstract_module import AbstractModule
+from lib.ConfigLoader import ConfigLoader
+from lib import regex_helper
+#from lib.objects.Items import Item
+from packages.Item import Item
+
+class Hosts(AbstractModule):
+    """
+    Hosts module for AIL framework
+    """
+
+    def __init__(self):
+        super(Hosts, self).__init__()
+
+        config_loader = ConfigLoader()
+        self.r_cache = config_loader.get_redis_conn("Redis_Cache")
+
+        self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name)
+
+        # regex timeout
+        self.regex_timeout = 30
+
+        # Waiting time in seconds between two processed messages
+        self.pending_seconds = 1
+
+        self.host_regex = r'\b([a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)\b'
+        re.compile(self.host_regex)
+
+        self.redis_logger.info(f"Module: {self.module_name} Launched")
+
+
+    def compute(self, message):
+        item = Item(message)
+
+        # mimetype = item_basic.get_item_mimetype(item.get_id())
+        # if mimetype.split('/')[0] == "text":
+
+        content = item.get_content()
+
+        hosts = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.host_regex, item.get_id(), content)
+        for host in hosts:
+            #print(host)
+
+            msg = f'{host} {item.get_id()}'
+            self.send_message_to_queue(msg, 'Host')
+
+
+
+if __name__ == '__main__':
+
+    module = Hosts()
+    module.run()
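
Note: the host regex matches dot-separated label sequences of up to 63 characters each. Since digits and hyphens are allowed in labels, it will also pick up dotted decimals such as 10.0.0.1. A standalone check with hypothetical input:

    import re

    host_regex = r'\b([a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)\b'
    text = 'creds found on mail.example.com and backup-01.example.org'
    print(re.findall(host_regex, text))
    # ['mail.example.com', 'backup-01.example.org']
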
@@ -54,7 +54,7 @@ class Tags(AbstractModule):
             self.send_message_to_queue(message, 'MISP_The_Hive_feeder')

             message = f'{item.get_type()};{item.get_subtype(r_str=True)};{item.get_id()}'
-            self.send_message_to_queue(message, 'Sync_module')
+            self.send_message_to_queue(message, 'SyncModule')

         else:
             # Malformed message

@@ -102,6 +102,7 @@ class AbstractModule(ABC):
                     self.compute(message)
                 except Exception as err:
                     trace = traceback.format_tb(err.__traceback__)
+                    trace = ''.join(trace)
                     self.redis_logger.critical(f"Error in module {self.module_name}: {err}")
                     self.redis_logger.critical(f"Module {self.module_name} input message: {message}")
                     self.redis_logger.critical(trace)
@@ -109,8 +110,7 @@ class AbstractModule(ABC):
                     print(f"ERROR: {err}")
                     print(f'MESSAGE: {message}')
                     print('TRACEBACK:')
-                    for line in trace:
-                        print(line)
+                    print(trace)
                 # remove from set_module
                 ## check if item process == completed

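
Note: joining the list returned by traceback.format_tb() turns the per-frame strings into one multi-line block, so the traceback can be logged in a single call instead of line by line. A standalone sketch:

    import traceback

    try:
        1 / 0
    except Exception as err:
        # format_tb() returns a list of frame strings; join once, log once
        trace = ''.join(traceback.format_tb(err.__traceback__))
        print(trace)
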
@@ -22,8 +22,12 @@ subscribe = Redis_Duplicate
 [Indexer]
 subscribe = Redis_Global

-[DomClassifier]
+[Hosts]
 subscribe = Redis_Global
+publish = Redis_Host
+
+[DomClassifier]
+subscribe = Redis_Host
 publish = Redis_D4_client

 [D4_client]
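
Note: the queue wiring now routes Global -> Hosts -> DomClassifier: Hosts consumes Redis_Global and publishes extracted hosts on Redis_Host, which DomClassifier consumes instead of reading full items. A sketch of how these INI sections express the chain (plain configparser stands in for the framework's config reader):

    import configparser
    import textwrap

    cfg = configparser.ConfigParser()
    cfg.read_string(textwrap.dedent('''\
        [Hosts]
        subscribe = Redis_Global
        publish = Redis_Host

        [DomClassifier]
        subscribe = Redis_Host
        publish = Redis_D4_client
        '''))
    for name in ('Hosts', 'DomClassifier'):
        print(name, cfg[name]['subscribe'], '->', cfg[name]['publish'])
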
@@ -57,6 +57,11 @@ minute_processed_paste = 10
 #Maximum line length authorized to make a diff between duplicates
 DiffMaxLineLength = 10000

+[AIL_2_AIL]
+server_host = 0.0.0.0
+server_port = 4443
+local_addr =
+
 #### Modules ####
 [BankAccount]
 max_execution_time = 60
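
Note: local_addr ships empty, and the client code above folds an empty string to None so the OS chooses the source interface. A configparser sketch of that conversion (assuming ConfigLoader wraps configparser, as the sketch does directly):

    import configparser

    cfg = configparser.ConfigParser()
    cfg.read_string('[AIL_2_AIL]\nserver_host = 0.0.0.0\nserver_port = 4443\nlocal_addr =\n')

    local_addr = cfg['AIL_2_AIL']['local_addr']
    local_addr = (local_addr, 0) if local_addr else None
    print(local_addr)  # None: let the OS pick the source interface
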
@@ -14,9 +14,6 @@ sudo apt-get install python3-pip virtualenv python3-dev python3-tk libfreetype6-
 #Needed for downloading jemalloc
 sudo apt-get install wget -qq

-#optional tor install
-sudo apt-get install tor -qq
-
 #Needed for bloom filters
 sudo apt-get install libssl-dev libfreetype6-dev python3-numpy -qq

@@ -59,9 +59,10 @@ class Test_Module_Categ(unittest.TestCase):

     def test_module(self):
         item_id = 'tests/2021/01/01/categ.gz'
-        test_categ = ['CreditCards', 'Mail', 'Onion', 'Web', 'Credential', 'Cve']
+        test_categ = ['CreditCards', 'Mail', 'Onion', 'Urls', 'Credential', 'Cve']

         result = self.module_obj.compute(item_id, r_result=True)
+        print(result)
         self.assertCountEqual(result, test_categ)

 class Test_Module_CreditCards(unittest.TestCase):
@@ -87,8 +88,10 @@ class Test_Module_DomClassifier(unittest.TestCase):
         self.module_obj = DomClassifier()

     def test_module(self):
+        test_host = 'foo.be'
         item_id = 'tests/2021/01/01/domain_classifier.gz'
-        result = self.module_obj.compute(item_id, r_result=True)
+        msg = f'{test_host} {item_id}'
+        result = self.module_obj.compute(msg, r_result=True)
         self.assertTrue(len(result))

 class Test_Module_Global(unittest.TestCase):

@@ -35,6 +35,7 @@
 </div>


+
 <!--
         <button class="btn btn-primary" onclick="tagsSelector()">
             <i class="fas fa-plus"></i>
@@ -47,7 +48,7 @@
 var ltags;
 var ltagsgalaxies;

-$.getJSON("{{ url_for('tags_ui.tag_taxonomies_tags_enabled_json') }}",
+$.getJSON("{{ url_for('Tags.get_all_tags') }}",
     function(data) {
       {% if 'taxonomies_tags' in tags_selector_data %}
        var valueData = [