Diffstat (limited to 'src/instances')
-rw-r--r--   src/instances/cloudflare.json    10
-rw-r--r--   src/instances/data.json          59
-rw-r--r--   src/instances/get_instances.py   549
-rw-r--r--   src/instances/piped.json         34
4 files changed, 356 insertions(+), 296 deletions(-)
diff --git a/src/instances/cloudflare.json b/src/instances/cloudflare.json
index 626c8ec5..ecaae2e9 100644
--- a/src/instances/cloudflare.json
+++ b/src/instances/cloudflare.json
@@ -1,7 +1,13 @@
 [
   "https://invidious.kavin.rocks",
-  "https://invidious-us.kavin.rocks",
   "https://invidious.lunar.icu",
+  "https://invidious-us.kavin.rocks",
+  "https://piped.kavin.rocks",
+  "https://piped.silkky.cloud",
+  "https://piped.tokhmi.xyz",
+  "https://piped.moomoo.me",
+  "https://piped.syncpundit.com",
+  "https://piped.mha.fi",
   "https://send.silkky.cloud",
   "https://nhanh.cloud",
   "https://nitter.domain.glass",
@@ -37,12 +43,12 @@
   "https://wiki.604kph.xyz",
   "https://wikiless.lunar.icu",
   "https://translate.syncpundit.com",
+  "https://lingva.lunar.icu",
   "https://searx.josie.lol",
   "https://searx.kujonello.cf",
   "https://searx.org",
   "https://searx.run",
   "https://searx.tk",
-  "https://s.alefvanoon.xyz",
   "https://search.albony.xyz",
   "https://search.garudalinux.org",
   "https://whoogle.lunar.icu",
diff --git a/src/instances/data.json b/src/instances/data.json
index 64ada2d4..59c0148c 100644
--- a/src/instances/data.json
+++ b/src/instances/data.json
@@ -6,7 +6,6 @@
       "https://invidious.snopyta.org",
       "https://invidious.kavin.rocks",
       "https://inv.riverside.rocks",
-      "https://invidious-us.kavin.rocks",
       "https://invidious.osi.kr",
       "https://y.com.sb",
       "https://tube.cthd.icu",
@@ -15,12 +14,13 @@
       "https://invidious.lunar.icu",
       "https://invidious.mutahar.rocks",
       "https://invidious.sethforprivacy.com",
+      "https://inv.bp.projectsegfau.lt",
       "https://invidious.weblibre.org",
       "https://invidious.esmailelbob.xyz",
       "https://youtube.076.ne.jp",
       "https://invidious.privacy.gd",
-      "https://inv.bp.mutahar.rocks",
-      "https://invidious.namazso.eu"
+      "https://invidious.namazso.eu",
+      "https://invidious-us.kavin.rocks"
     ],
     "tor": [
       "http://c7hqkpkpemu6e7emz5b4vyz7idjgdvgaaa3dyimmeojqbgpea3xqjoid.onion",
@@ -34,6 +34,23 @@
       "http://euxxcnhsynwmfidvhjf6uzptsmh4dipkmgdmcmxxuo7tunp3ad2jrwyd.onion/"
     ]
   },
+  "piped": {
+    "normal": [
+      "https://piped.kavin.rocks",
+      "https://piped.silkky.cloud",
+      "https://piped.tokhmi.xyz",
+      "https://piped.moomoo.me",
+      "https://il.ax",
+      "https://piped.syncpundit.com",
+      "https://piped.mha.fi",
+      "https://piped.mint.lgbt",
+      "https://piped.privacy.com.de",
+      "https://piped.notyourcomputer.net"
+    ],
+    "tor": [
+      "http://piped2bbch4xslbl2ckr6k62q56kon56ffowxaqzy42ai22a4sash3ad.onion"
+    ]
+  },
   "proxiTok": {
     "normal": [
       "https://proxitok.herokuapp.com",
@@ -113,7 +130,6 @@
       "https://nitter.poast.org",
       "https://nitter.lunar.icu",
       "https://nitter.bird.froth.zone",
-      "https://nitter.drivet.xyz",
       "https://twitter.paranoid.cf"
     ],
     "tor": [
@@ -312,7 +328,8 @@
       "https://lingva.pussthecat.org",
       "https://translate.datatunnel.xyz",
       "https://lingva.esmailelbob.xyz",
-      "https://translate.plausibility.cloud"
+      "https://translate.plausibility.cloud",
+      "https://lingva.lunar.icu"
     ],
     "tor": []
   },
@@ -336,7 +353,6 @@
       "https://engo.mint.lgbt",
       "https://jsearch.pw",
       "https://nibblehole.com",
-      "https://procurx.pt",
       "https://search.antonkling.se",
       "https://search.asynchronousexchange.com",
       "https://search.disroot.org",
@@ -393,6 +409,7 @@
     "tor": [
       "http://w5rl6wsd7mzj4bdkbuqvzidet5osdsm5jhg2f7nvfidakfq5exda5wid.onion",
       "http://4n53nafyi77iplnbrpmxnp3x4exbswwxigujaxy3b37fvr7bvlopxeyd.onion",
+      "http://searxdr3pqz4nydgnqocsia2xbywptxbkympa2emn7zlgggrir4bkfad.onion",
       "http://rq2w52kyrif3xpfihkgjnhqm3a5aqhoikpv72z3drpjglfzc2wr5z4yd.onion",
       "http://searx3aolosaf3urwnhpynlhuokqsgz47si4pzz5hvb7uuzyjncl2tid.onion"
     ],
@@ -402,6 +419,7 @@
       "https://etsi.me",
       "https://northboot.xyz",
       "https://paulgo.io",
+      "https://procurx.pt",
       "https://s.zhaocloud.net",
       "https://search.bus-hit.me",
       "https://search.mdosch.de",
@@ -411,6 +429,7 @@
       "https://search.rabbit-company.com",
       "https://search.vojkovic.xyz",
       "https://search.zzls.xyz",
+      "https://searx.bardia.tech",
       "https://searx.be",
       "https://searx.ebnar.xyz",
       "https://searx.esmailelbob.xyz",
@@ -430,7 +449,7 @@
   "whoogle": {
     "normal": [
       "https://gowogle.voring.me",
-      "https://s.alefvanoon.xyz",
+      "https://s.tokhmi.xyz",
       "https://search.albony.xyz",
       "https://search.garudalinux.org",
       "https://search.sethforprivacy.com",
@@ -466,8 +485,11 @@
     ]
   },
   "peertube": [
+    "https://video.toby3d.me",
+    "https://videos.grafo.zone",
+    "https://tube.bakosi.org",
+    "https://tube.chocoflan.net",
     "https://video.pthreat.co",
-    "https://viet69.ml",
     "https://peertube.egroc.de",
     "https://zcxfruit.ru",
     "https://pt.k2s.sk",
@@ -580,14 +602,12 @@
     "https://tube.moec.top",
     "https://tube.erzbistum-hamburg.de",
     "https://video.germanische-heilkunde.at",
-    "https://pt.irnok.net",
     "https://tubulus.openlatin.org",
     "https://shiotube.f5.si",
     "https://views.southfox.me",
     "https://vide.oxel.me",
     "https://video.mttv.it",
     "https://peertube.cloud.nerdraum.de",
-    "https://v.jrgnsn.net",
     "https://vid.pretok.tv",
     "https://videos.slownewsdayshow.com",
     "https://videos.sarcasmstardust.com",
@@ -651,7 +671,6 @@
     "https://aktivtube.com",
     "https://peertube.offerman.com",
     "https://sneedtube.com",
-    "https://venuse.nastub.cz",
     "https://poast.tv",
     "https://testube.distrilab.fr",
     "https://peertube.rse43.com",
@@ -705,7 +724,6 @@
     "https://watch.rt4mn.org",
     "https://video.lrose.de",
     "https://video.chalec.org",
-    "https://galileo.news",
     "https://dud175.inf.tu-dresden.de",
     "https://peertube.fenarinarsa.com",
     "https://peertube.gardion.de",
@@ -732,7 +750,6 @@
     "https://peertube.aventer.biz",
     "https://video.bhscs2.club",
     "https://videos.rights.ninja",
-    "https://videos.piecemaker.rocks",
     "https://tube.die-rote-front.de",
     "https://v.endpoint.ml",
     "https://pertur.be",
@@ -744,9 +761,7 @@
     "https://open.movie",
     "https://tube.rfc1149.net",
     "https://tube.radiomercure.fr",
-    "https://video.3cmr.fr",
     "https://medias.debrouillonet.org",
-    "https://vid.thatswhathappened.tv",
     "https://peertube.1984.cz",
     "https://tube.sp4ke.com",
     "https://pt.nix.uno",
@@ -793,7 +808,6 @@
     "https://video.interru.io",
     "https://tube.cnr.it",
     "https://peertube.dtmf.ca",
-    "https://tube.tr4sk.me",
     "https://peertube.dk",
     "https://tube.ponsonaille.fr",
     "https://peertube.ares.bioxis-server.fr",
@@ -811,7 +825,6 @@
     "https://peertube.redpill-insight.com",
     "https://tube.22decembre.eu",
     "https://video.berzs.xyz",
-    "https://invoice.peertube.biz",
     "https://pt.sfunk1x.com",
     "https://video.antopie.org",
     "https://vtr.chikichiki.tube",
@@ -1083,7 +1096,6 @@
     "https://tube.pmj.rocks",
     "https://gary.vger.cloud",
     "https://video.guerredeclasse.fr",
-    "https://tube.wehost.lgbt",
     "https://ptmir5.inter21.net",
     "https://ptmir4.inter21.net",
     "https://ptmir3.inter21.net",
@@ -1187,7 +1199,6 @@
     "https://video.p1ng0ut.social",
     "https://watch.deranalyst.ch",
     "https://video.discord-insoumis.fr",
-    "https://peertube.forsud.be",
     "https://video.pcf.fr",
     "https://kumi.tube",
     "https://tube.rsi.cnr.it",
@@ -1510,6 +1521,14 @@
     "https://video.lw1.at",
     "https://www.yiny.org",
     "https://video.typica.us",
-    "https://videos.lescommuns.org"
+    "https://videos.lescommuns.org",
+    "https://dialup.express",
+    "https://peertube.1312.media",
+    "https://skeptikon.fr",
+    "https://video.blueline.mg",
+    "https://tube.homecomputing.fr",
+    "https://video.tedomum.net",
+    "https://video.g3l.org",
+    "https://fontube.fr"
   ]
 }
\ No newline at end of file
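
For orientation, data.json maps each frontend name to per-network instance lists ("normal", "tor", and for a few services "i2p"), while the "peertube" entry is a flat array of clearnet URLs. A minimal sketch of reading that layout, with the repository-relative path assumed for illustration:

import json

# Path assumed for illustration, matching how get_instances.py opens its sibling piped.json.
data_path = './src/instances/data.json'

with open(data_path) as f:
    data = json.load(f)

# Most frontends map to {"normal": [...], "tor": [...]} (some also carry "i2p");
# "peertube" is a plain list of clearnet URLs.
for frontend, instances in data.items():
    if isinstance(instances, dict):
        for network, urls in instances.items():
            print(frontend, network, len(urls))
    else:
        print(frontend, 'normal', len(instances))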
diff --git a/src/instances/get_instances.py b/src/instances/get_instances.py
index e8e1952f..7b62a9f7 100644
--- a/src/instances/get_instances.py
+++ b/src/instances/get_instances.py
@@ -67,260 +67,279 @@ def is_cloudflare(url):
     return False
 
 
-# Invidious
-r = requests.get('https://api.invidious.io/instances.json')
-rJson = json.loads(r.text)
-invidiousList = {}
-invidiousList['normal'] = []
-invidiousList['tor'] = []
-for instance in rJson:
-    if instance[1]['type'] == 'https':
-        invidiousList['normal'].append(instance[1]['uri'])
-    elif instance[1]['type'] == 'onion':
-        invidiousList['tor'].append(instance[1]['uri'])
-mightyList['invidious'] = invidiousList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Invidious')
-
-# ProxiTok
-r = requests.get(
-    'https://raw.githubusercontent.com/wiki/pablouser1/ProxiTok/Public-instances.md')
-
-tmp = re.findall(
-    r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|", r.text)
-proxiTokList = {}
-proxiTokList['normal'] = []
-proxiTokList['tor'] = []
-for item in tmp:
-    proxiTokList['normal'].append(item)
-mightyList['proxiTok'] = proxiTokList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'ProxiTok')
-
-# Send
-r = requests.get(
-    'https://gitlab.com/timvisee/send-instances/-/raw/master/README.md')
-tmp = re.findall(
-    r"- ([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}", r.text)
-sendList = {}
-sendList['normal'] = []
-sendList['tor'] = []
-for item in tmp:
-    sendList['normal'].append(item)
-mightyList['send'] = sendList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Send')
-
-# Nitter
-r = requests.get('https://github.com/zedeus/nitter/wiki/Instances')
-soup = BeautifulSoup(r.text, 'html.parser')
-markdownBody = soup.find(class_='markdown-body')
-tables = markdownBody.find_all('table')
-tables.pop(3)
-tables.pop(3)
-nitterList = {}
-nitterList['normal'] = []
-nitterList['tor'] = []
-for table in tables:
-    tbody = table.find('tbody')
-    trs = tbody.find_all('tr')
-    for tr in trs:
-        td = tr.find('td')
-        a = td.find('a')
-        url = a.contents[0]
-        if url.endswith('.onion'):
-            url = 'http://' + url
-            nitterList['tor'].append(url)
+def invidious():
+    r = requests.get('https://api.invidious.io/instances.json')
+    rJson = json.loads(r.text)
+    invidiousList = {}
+    invidiousList['normal'] = []
+    invidiousList['tor'] = []
+    for instance in rJson:
+        if instance[1]['type'] == 'https':
+            invidiousList['normal'].append(instance[1]['uri'])
+        elif instance[1]['type'] == 'onion':
+            invidiousList['tor'].append(instance[1]['uri'])
+    mightyList['invidious'] = invidiousList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Invidious')
+
+
+def piped():
+    json_object = json.dumps(mightyList, ensure_ascii=False, indent=2)
+    with open('./src/instances/piped.json') as file:
+        mightyList['piped'] = json.load(file)
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Piped')
+
+
+def proxitok():
+    r = requests.get(
+        'https://raw.githubusercontent.com/wiki/pablouser1/ProxiTok/Public-instances.md')
+
+    tmp = re.findall(
+        r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|", r.text)
+    proxiTokList = {}
+    proxiTokList['normal'] = []
+    proxiTokList['tor'] = []
+    for item in tmp:
+        proxiTokList['normal'].append(item)
+    mightyList['proxiTok'] = proxiTokList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'ProxiTok')
+
+
+def send():
+    r = requests.get(
+        'https://gitlab.com/timvisee/send-instances/-/raw/master/README.md')
+    tmp = re.findall(
+        r"- ([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}", r.text)
+    sendList = {}
+    sendList['normal'] = []
+    sendList['tor'] = []
+    for item in tmp:
+        sendList['normal'].append(item)
+    mightyList['send'] = sendList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Send')
+
+
+def nitter():
+    r = requests.get('https://github.com/zedeus/nitter/wiki/Instances')
+    soup = BeautifulSoup(r.text, 'html.parser')
+    markdownBody = soup.find(class_='markdown-body')
+    tables = markdownBody.find_all('table')
+    tables.pop(3)
+    tables.pop(3)
+    nitterList = {}
+    nitterList['normal'] = []
+    nitterList['tor'] = []
+    for table in tables:
+        tbody = table.find('tbody')
+        trs = tbody.find_all('tr')
+        for tr in trs:
+            td = tr.find('td')
+            a = td.find('a')
+            url = a.contents[0]
+            if url.endswith('.onion'):
+                url = 'http://' + url
+                nitterList['tor'].append(url)
+            else:
+                url = 'https://' + url
+                nitterList['normal'].append(url)
+    mightyList['nitter'] = nitterList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Nitter')
+
+
+def bibliogram():
+    r = requests.get('https://bibliogram.pussthecat.org/api/instances')
+    rJson = json.loads(r.text)
+    bibliogramList = {}
+    bibliogramList['normal'] = []
+    bibliogramList['tor'] = []
+    for item in rJson['data']:
+        bibliogramList['normal'].append(item['address'])
+    mightyList['bibliogram'] = bibliogramList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Bibliogram')
+
+
+def libreddit():
+    r = requests.get(
+        'https://raw.githubusercontent.com/spikecodes/libreddit/master/README.md')
+    libredditList = {}
+    libredditList['normal'] = []
+    libredditList['tor'] = []
+
+    tmp = re.findall(
+        r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|", r.text)
+
+    tmp = filterLastSlash(tmp)
+
+    for item in tmp:
+        if item.endswith('.onion'):
+            libredditList['tor'].append(item)
         else:
-            url = 'https://' + url
-            nitterList['normal'].append(url)
-mightyList['nitter'] = nitterList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Nitter')
-
-# Bibliogram
-r = requests.get('https://bibliogram.pussthecat.org/api/instances')
-rJson = json.loads(r.text)
-bibliogramList = {}
-bibliogramList['normal'] = []
-bibliogramList['tor'] = []
-for item in rJson['data']:
-    bibliogramList['normal'].append(item['address'])
-mightyList['bibliogram'] = bibliogramList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Bibliogram')
-
-# LibReddit
-r = requests.get(
-    'https://raw.githubusercontent.com/spikecodes/libreddit/master/README.md')
-libredditList = {}
-libredditList['normal'] = []
-libredditList['tor'] = []
-
-tmp = re.findall(
-    r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|", r.text)
-
-tmp = filterLastSlash(tmp)
-
-for item in tmp:
-    if item.endswith('.onion'):
-        libredditList['tor'].append(item)
-    else:
-        libredditList['normal'].append(item)
-mightyList['libreddit'] = libredditList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'LibReddit')
-
-# Teddit
-r = requests.get(
-    'https://codeberg.org/teddit/teddit/raw/branch/main/instances.json')
-rJson = json.loads(r.text)
-tedditList = {}
-tedditList['normal'] = []
-tedditList['tor'] = []
-for item in rJson:
-    url = item['url']
-    if url != '':
-        tedditList['normal'].append(url)
-    if 'onion' in item:
-        onion = item['onion']
-        if onion != '':
-            tedditList['tor'].append(onion)
-
-mightyList['teddit'] = tedditList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Teddit')
-
-
-# Wikiless
-r = requests.get('https://wikiless.org/instances.json')
-rJson = json.loads(r.text)
-wikilessList = {}
-wikilessList['normal'] = []
-wikilessList['tor'] = []
-wikilessList['i2p'] = []
-for item in rJson:
-    if item.endswith('.onion'):
-        wikilessList['tor'].append('http://' + item)
-    elif item.endswith('.i2p'):
-        wikilessList['i2p'].append('http://' + item)
-    else:
-        wikilessList['normal'].append('https://' + item)
-mightyList['wikiless'] = wikilessList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Wikiless')
-
-# Scribe
-r = requests.get(
-    'https://git.sr.ht/~edwardloveall/scribe/blob/main/docs/instances.json')
-rJson = json.loads(r.text)
-scribeList = {}
-scribeList['normal'] = []
-scribeList['tor'] = []
-for item in rJson:
-    scribeList['normal'].append(item)
-mightyList['scribe'] = scribeList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Scribe')
-
-# SimplyTranslate
-r = requests.get('https://simple-web.org/instances/simplytranslate')
-simplyTranslateList = {}
-simplyTranslateList['normal'] = []
-for item in r.text.strip().split('\n'):
-    simplyTranslateList['normal'].append('https://' + item)
-
-r = requests.get('https://simple-web.org/instances/simplytranslate_onion')
-simplyTranslateList['tor'] = []
-for item in r.text.strip().split('\n'):
-    simplyTranslateList['tor'].append('http://' + item)
-
-mightyList['simplyTranslate'] = simplyTranslateList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'SimplyTranslate')
-
-# LinvgaTranslate
-r = requests.get(
-    'https://raw.githubusercontent.com/TheDavidDelta/lingva-translate/main/instances.json')
-rJson = json.loads(r.text)
-lingvaList = {}
-lingvaList['normal'] = []
-lingvaList['tor'] = []
-for item in rJson:
-    lingvaList['normal'].append(item)
-mightyList['lingva'] = lingvaList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'LinvgaTranslate')
-
-
-# SearX, SearXNG
-r = requests.get('https://searx.space/data/instances.json')
-rJson = json.loads(r.text)
-searxList = {}
-searxList['tor'] = []
-searxList['i2p'] = []
-searxList['normal'] = []
-searxngList = {}
-searxngList['tor'] = []
-searxngList['i2p'] = []
-searxngList['normal'] = []
-for item in rJson['instances']:
-    if item[:-1].endswith('.onion'):
-        if (rJson['instances'][item].get('generator') == 'searxng'):
-            searxngList['tor'].append(item[:-1])
+            libredditList['normal'].append(item)
+    mightyList['libreddit'] = libredditList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'LibReddit')
+
+
+def teddit():
+    r = requests.get(
+        'https://codeberg.org/teddit/teddit/raw/branch/main/instances.json')
+    rJson = json.loads(r.text)
+    tedditList = {}
+    tedditList['normal'] = []
+    tedditList['tor'] = []
+    for item in rJson:
+        url = item['url']
+        if url != '':
+            tedditList['normal'].append(url)
+        if 'onion' in item:
+            onion = item['onion']
+            if onion != '':
+                tedditList['tor'].append(onion)
+
+    mightyList['teddit'] = tedditList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Teddit')
+
+
+def wikiless():
+    r = requests.get('https://wikiless.org/instances.json')
+    rJson = json.loads(r.text)
+    wikilessList = {}
+    wikilessList['normal'] = []
+    wikilessList['tor'] = []
+    wikilessList['i2p'] = []
+    for item in rJson:
+        if item.endswith('.onion'):
+            wikilessList['tor'].append('http://' + item)
+        elif item.endswith('.i2p'):
+            wikilessList['i2p'].append('http://' + item)
         else:
-            searxList['tor'].append(item[:-1])
-    elif item[:-1].endswith('.i2p'):
-        if (rJson['instances'][item].get('generator') == 'searxng'):
-            searxngList['i2p'].append(item[:-1])
+            wikilessList['normal'].append('https://' + item)
+    mightyList['wikiless'] = wikilessList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Wikiless')
+
+
+def scribe():
+    r = requests.get(
+        'https://git.sr.ht/~edwardloveall/scribe/blob/main/docs/instances.json')
+    rJson = json.loads(r.text)
+    scribeList = {}
+    scribeList['normal'] = []
+    scribeList['tor'] = []
+    for item in rJson:
+        scribeList['normal'].append(item)
+    mightyList['scribe'] = scribeList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Scribe')
+
+
+def simplytranslate():
+    r = requests.get('https://simple-web.org/instances/simplytranslate')
+    simplyTranslateList = {}
+    simplyTranslateList['normal'] = []
+    for item in r.text.strip().split('\n'):
+        simplyTranslateList['normal'].append('https://' + item)
+
+    r = requests.get('https://simple-web.org/instances/simplytranslate_onion')
+    simplyTranslateList['tor'] = []
+    for item in r.text.strip().split('\n'):
+        simplyTranslateList['tor'].append('http://' + item)
+
+    mightyList['simplyTranslate'] = simplyTranslateList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'SimplyTranslate')
+
+
+def linvgatranslate():
+    r = requests.get(
+        'https://raw.githubusercontent.com/TheDavidDelta/lingva-translate/main/instances.json')
+    rJson = json.loads(r.text)
+    lingvaList = {}
+    lingvaList['normal'] = []
+    lingvaList['tor'] = []
+    for item in rJson:
+        lingvaList['normal'].append(item)
+    mightyList['lingva'] = lingvaList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'LinvgaTranslate')
+
+
+def searx_searxng():
+    r = requests.get('https://searx.space/data/instances.json')
+    rJson = json.loads(r.text)
+    searxList = {}
+    searxList['tor'] = []
+    searxList['i2p'] = []
+    searxList['normal'] = []
+    searxngList = {}
+    searxngList['tor'] = []
+    searxngList['i2p'] = []
+    searxngList['normal'] = []
+    for item in rJson['instances']:
+        if item[:-1].endswith('.onion'):
+            if (rJson['instances'][item].get('generator') == 'searxng'):
+                searxngList['tor'].append(item[:-1])
+            else:
+                searxList['tor'].append(item[:-1])
+        elif item[:-1].endswith('.i2p'):
+            if (rJson['instances'][item].get('generator') == 'searxng'):
+                searxngList['i2p'].append(item[:-1])
+            else:
+                searxList['i2p'].append(item[:-1])
         else:
-            searxList['i2p'].append(item[:-1])
-    else:
-        if (rJson['instances'][item].get('generator') == 'searxng'):
-            searxngList['normal'].append(item[:-1])
+            if (rJson['instances'][item].get('generator') == 'searxng'):
+                searxngList['normal'].append(item[:-1])
+            else:
+                searxList['normal'].append(item[:-1])
+
+    mightyList['searx'] = searxList
+    mightyList['searxng'] = searxngList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'SearX, SearXNG')
+
+
+def whoogle():
+    r = requests.get(
+        'https://raw.githubusercontent.com/benbusby/whoogle-search/main/misc/instances.txt')
+    tmpList = r.text.strip().split('\n')
+    whoogleList = {}
+    whoogleList['normal'] = []
+    whoogleList['tor'] = []
+    whoogleList['i2p'] = []
+    for item in tmpList:
+        if item.endswith('.onion'):
+            whoogleList['tor'].append(item)
+        elif item.endswith('.i2p'):
+            whoogleList['i2p'].append(item)
         else:
-            searxList['normal'].append(item[:-1])
-
-mightyList['searx'] = searxList
-mightyList['searxng'] = searxngList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'SearX, SearXNG')
-
-# Whoogle
-r = requests.get(
-    'https://raw.githubusercontent.com/benbusby/whoogle-search/main/misc/instances.txt')
-tmpList = r.text.strip().split('\n')
-whoogleList = {}
-whoogleList['normal'] = []
-whoogleList['tor'] = []
-whoogleList['i2p'] = []
-for item in tmpList:
-    if item.endswith('.onion'):
-        whoogleList['tor'].append(item)
-    elif item.endswith('.i2p'):
-        whoogleList['i2p'].append(item)
-    else:
-        whoogleList['normal'].append(item)
-mightyList['whoogle'] = whoogleList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Whoogle')
-
-# Rimgo
-r = requests.get(
-    'https://codeberg.org/video-prize-ranch/rimgo/raw/branch/main/instances.json')
-rJson = json.loads(r.text)
-rimgoList = {}
-rimgoList['normal'] = []
-rimgoList['tor'] = []
-rimgoList['i2p'] = []
-for item in rJson:
-    if item.endswith('.onion'):
-        rimgoList['tor'].append('http://' + item)
-    elif item.endswith('.i2p'):
-        rimgoList['i2p'].append('http://' + item)
-    else:
-        rimgoList['normal'].append('https://' + item)
-mightyList['rimgo'] = rimgoList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Rimgo')
-
-# Peertube
-r = requests.get(
-    'https://instances.joinpeertube.org/api/v1/instances?start=0&count=1045&sort=-createdAt')
-rJson = json.loads(r.text)
-
-myList = []
-for k in rJson['data']:
-    myList.append('https://'+k['host'])
-
-mightyList['peertube'] = myList
-print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Peertube')
+            whoogleList['normal'].append(item)
+    mightyList['whoogle'] = whoogleList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Whoogle')
+
+
+def rimgo():
+    r = requests.get(
+        'https://codeberg.org/video-prize-ranch/rimgo/raw/branch/main/instances.json')
+    rJson = json.loads(r.text)
+    rimgoList = {}
+    rimgoList['normal'] = []
+    rimgoList['tor'] = []
+    rimgoList['i2p'] = []
+    for item in rJson:
+        if item.endswith('.onion'):
+            rimgoList['tor'].append('http://' + item)
+        elif item.endswith('.i2p'):
+            rimgoList['i2p'].append('http://' + item)
+        else:
+            rimgoList['normal'].append('https://' + item)
+    mightyList['rimgo'] = rimgoList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Rimgo')
+
+
+def peertube():
+    r = requests.get(
+        'https://instances.joinpeertube.org/api/v1/instances?start=0&count=1045&sort=-createdAt')
+    rJson = json.loads(r.text)
+
+    myList = []
+    for k in rJson['data']:
+        myList.append('https://'+k['host'])
+
+    mightyList['peertube'] = myList
+    print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'Peertube')
 
 
 def isValid(url):  # This code is contributed by avanitrachhadiya2155
@@ -331,17 +350,35 @@ def isValid(url):  # This code is contributed by avanitrachhadiya2155
         return False
 
 
+invidious()
+piped()
+proxitok()
+send()
+nitter()
+bibliogram()
+libreddit()
+teddit()
+wikiless()
+scribe()
+simplytranslate()
+linvgatranslate()
+searx_searxng()
+whoogle()
+rimgo()
+
 cloudflareMightyList = []
 for k1, v1 in mightyList.items():
     if type(mightyList[k1]) is dict:
         for k2, v2 in mightyList[k1].items():
             for instance in mightyList[k1][k2]:
-                if (not isValid(instance)):
-                    mightyList[k1][k2].remove(instance)
-                    print("removed " + instance)
-                else:
-                    if not instance.endswith('.onion') and not instance.endswith('.i2p') and is_cloudflare(instance):
-                        cloudflareMightyList.append(instance)
+                # if (not isValid(instance)):
+                #     del mightyList[k1][k2][instance]
+                #     print("removed " + instance)
+                # else:
+                if not instance.endswith('.onion') and not instance.endswith('.i2p') and is_cloudflare(instance):
+                    cloudflareMightyList.append(instance)
+
+peertube()
 
 
 # Writing to file
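
The hunk above only shows the tail of is_cloudflare() (its closing return False); the function body itself lies outside this diff. For reference, a minimal, hypothetical sketch of one way such a check can be written — not the repository's actual implementation — resolving the host and testing it against Cloudflare's published IPv4 ranges:

import socket
import ipaddress
import requests

def is_cloudflare_sketch(url):
    # Hypothetical stand-in for is_cloudflare(): resolve the host and test whether
    # its address falls inside Cloudflare's published IPv4 ranges.
    host = url.split('//', 1)[-1].split('/', 1)[0]
    try:
        addr = ipaddress.ip_address(socket.gethostbyname(host))
    except (socket.gaierror, ValueError):
        return False
    # Fetched per call for brevity; a real script would cache the range list.
    ranges = requests.get('https://www.cloudflare.com/ips-v4').text.split()
    return any(addr in ipaddress.ip_network(cidr) for cidr in ranges)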
diff --git a/src/instances/piped.json b/src/instances/piped.json
index fa5cbef0..23cb8855 100644
--- a/src/instances/piped.json
+++ b/src/instances/piped.json
@@ -1,19 +1,17 @@
 {
-  "piped": {
-    "normal": [
-      "https://piped.kavin.rocks",
-      "https://piped.silkky.cloud",
-      "https://piped.tokhmi.xyz",
-      "https://piped.moomoo.me",
-      "https://il.ax",
-      "https://piped.syncpundit.com",
-      "https://piped.mha.fi",
-      "https://piped.mint.lgbt",
-      "https://piped.privacy.com.de",
-      "https://piped.notyourcomputer.net"
-    ],
-    "tor" : [
-      "http://piped2bbch4xslbl2ckr6k62q56kon56ffowxaqzy42ai22a4sash3ad.onion"
-    ]
-  }
-}
+  "normal": [
+    "https://piped.kavin.rocks",
+    "https://piped.silkky.cloud",
+    "https://piped.tokhmi.xyz",
+    "https://piped.moomoo.me",
+    "https://il.ax",
+    "https://piped.syncpundit.com",
+    "https://piped.mha.fi",
+    "https://piped.mint.lgbt",
+    "https://piped.privacy.com.de",
+    "https://piped.notyourcomputer.net"
+  ],
+  "tor": [
+    "http://piped2bbch4xslbl2ckr6k62q56kon56ffowxaqzy42ai22a4sash3ad.onion"
+  ]
+}
\ No newline at end of file
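
As the new piped() function in get_instances.py shows, the loader assigns json.load(file) straight into mightyList['piped'], which is why piped.json now keeps the "normal"/"tor" lists at the top level instead of under an extra "piped" key. A minimal sketch of that load against the flattened layout:

import json

# Mirrors the assignment in piped(); with the old nested layout the same call
# would have produced a doubly nested mightyList['piped']['piped'].
mightyList = {}
with open('./src/instances/piped.json') as file:
    mightyList['piped'] = json.load(file)

print(sorted(mightyList['piped'].keys()))  # ['normal', 'tor']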