Compare commits

No commits in common. "cee5e5fffd3d0de05a72edd92912ea9a12693a72" and "9b57621ecd84f099be41fbfc3fef793871026dd9" have entirely different histories.

cee5e5fffd ... 9b57621ecd

README.md
@@ -1,22 +0,0 @@
# DNS over Wikipedia

Wikipedia keeps track of official URLs for popular websites. With DNS over Wikipedia installed, domains ending in `.idk` are redirected by searching Wikipedia and extracting the relevant URL from the article's infobox.

Example:

1. Type `scihub.idk/` in the browser address bar
2. Observe a redirect to `https://sci-hub.tw` (at the time of writing)

<img src="./demo.gif" width="600"/>

> Instead of googling for the site, I google for the site's Wikipedia article ("scihub wiki") which usually has an up-to-date link to the site in the sidebar, whereas Google is forced to censor their results.
>
> If you Google "Piratebay", the first search result is a fake "thepirate-bay.org" (with a dash) but the Wikipedia article lists the right one.
>
> — [shpx](https://news.ycombinator.com/item?id=22414031)
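
Under the hood, the extension asks the Wikipedia API which article matches the search term and reads the article's canonical URL from the response; the official site's address is then taken from the first link in the article's infobox row labeled "URL" or "Website", with the article itself as a fallback. The first step can be reproduced from a terminal; this sketch is illustrative and assumes `curl` and `jq` are available:

```bash
# Ask Wikipedia which article matches "scihub" and print its canonical
# URL -- the same "fullurl" field the extension reads.
curl -s "https://en.wikipedia.org/w/api.php?action=query&prop=info&inprop=url&format=json&titles=scihub" \
  | jq -r '.query.pages[].fullurl'
```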

## Installation Options

#### [Chrome Extension](https://chrome.google.com/webstore/detail/mjmjpfncapfopnommmngnmjalkopljji/)

#### [Firefox Extension](https://addons.mozilla.org/en-US/firefox/addon/dns-over-wikipedia/)

#### [(optional) Rust Redirect Script](./hosts-file)

common.js
@@ -1,47 +0,0 @@
var apiPrefix = "https://en.wikipedia.org/w/api.php?action=query&prop=info&inprop=url&format=json&origin=*&titles=";

// Synchronous XHR: the blocking webRequest listener below has to
// produce its redirect answer synchronously.
function request(url) {
  var x = new XMLHttpRequest();
  x.open('GET', url, false);
  x.send(null);
  return x.responseText;
}

// Resolve a search query to a site URL: look up the Wikipedia article,
// then pull the first link out of the infobox row labeled URL/Website.
function redirect(searchQuery) {
  var apiData = JSON.parse(request(apiPrefix + searchQuery));
  var wikiUrl = Object.values(apiData.query.pages)[0].fullurl;
  var html = request(wikiUrl);
  var doc = new DOMParser().parseFromString(html, "text/html");
  var infoboxRows = Array.from(doc.querySelectorAll('table.infobox tbody tr'));
  var wikiUrlRow = infoboxRows.filter(x => x.innerText.match(/(?:URL)|(?:Website)/));

  if (wikiUrlRow[0]) {
    return wikiUrlRow[0].querySelector('a').href;
  } else {
    // No infobox link found; fall back to the Wikipedia article itself.
    return wikiUrl;
  }
}

var root = null;
if (typeof chrome !== 'undefined') {
  root = chrome; // chrome
} else if (typeof browser !== 'undefined') {
  root = browser; // firefox
}

root.webRequest.onBeforeRequest.addListener(
  (requestDetails) => {
    var hostname = new URL(requestDetails.url).hostname;
    var searchQuery = hostname.replace(/\.idk$/, '');
    return {redirectUrl: redirect(searchQuery)};
  },
  {urls: ["*://*.idk/*"]},
  ["blocking"]
);

// The omnibox keyword ("idk" + space in the address bar) is Chrome-only.
if (typeof chrome !== 'undefined') {
  chrome.omnibox.onInputEntered.addListener(function(text) {
    chrome.tabs.update({ url: redirect(text) });
  });
}

manifest.json
@@ -1,16 +0,0 @@
{
  "manifest_version": 2,
  "name": "DNS over Wikipedia",
  "version": "0.0.5",
  "description": "Resolve .idk domains using the URL provided by the Wikipedia page for a given topic",
  "homepage_url": "https://github.com/aaronjanse/dns-over-wikipedia",

  "background": {
    "scripts": ["common.js"]
  },
  "permissions": [
    "webRequest", "webRequestBlocking",
    "*://*.idk/*", "https://en.wikipedia.org/*"
  ],
  "omnibox": { "keyword" : "idk" }
}

demo.gif
Binary file not shown. Before: 677 KiB.

.gitignore
@@ -1,2 +0,0 @@
/target

Cargo.lock
File diff suppressed because it is too large.

Cargo.toml
@@ -1,13 +0,0 @@
[package]
name = "dns-over-wikipedia"
version = "0.1.0"
authors = ["Aaron Janse <aaron@ajanse.me>", "Vihan Bhargava <vihan+github@vihan.org>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
nickel = "*"
reqwest = { version = "0.10", features = ["blocking"] }
json = "0.12.4"
scraper = "0.11.0"

hosts-file/README.md
@@ -1,44 +0,0 @@
Required dependencies:

- dnsmasq
  - Ubuntu: `sudo apt install dnsmasq`
  - macOS: `brew install dnsmasq`
- Rust (and Cargo)

### 1. Configure dnsmasq

#### Linux:

Add the following to your `dnsmasq.conf`; it makes dnsmasq answer `127.0.0.1` for every name under `.idk`:

```
address=/.idk/127.0.0.1
```

#### macOS:

```bash
echo "address=/.idk/127.0.0.1" >> "$(brew --prefix)/etc/dnsmasq.conf"
sudo brew services start dnsmasq
```

### 2. Use dnsmasq to resolve queries

#### macOS:

```bash
sudo mkdir -p /etc/resolver
sudo tee -a /etc/resolver/idk > /dev/null << EOF
nameserver 127.0.0.1
EOF
```
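
At this point, every `.idk` name should resolve to the loopback address. A quick way to check (illustrative; assumes `dig` is installed):

```bash
# dnsmasq should answer 127.0.0.1 for any .idk name, per the wildcard rule
dig +short scihub.idk @127.0.0.1
```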

### 3. Install the redirect script

```bash
cargo install --path .
```

### 4. Run it

`sudo` is required because the server binds port 80:

```bash
sudo dns-over-wikipedia
```
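
Once both dnsmasq and the server are running, fetching any `.idk` domain should yield a permanent redirect to the site listed on Wikipedia. A rough end-to-end check (output illustrative):

```bash
# Expect a 308 Permanent Redirect pointing at the official site
curl -s -o /dev/null -w '%{http_code} %{redirect_url}\n' http://scihub.idk/
```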

shell.nix
@@ -1,6 +0,0 @@
with import <nixpkgs> {};

mkShell {
  name = "dns-over-wikipedia";
  buildInputs = [ rustup openssl pkgconfig ];
}

src/main.rs
@@ -1,30 +0,0 @@
#[macro_use] extern crate nickel;

use nickel::Nickel;
use nickel::status::StatusCode;
use nickel::hyper::header::{Host, Location};

mod wikipedia;

fn main() {
    let mut server = Nickel::new();

    server.utilize(router! {
        get "**" => |req, mut res| {
            let headers = &req.origin.headers;
            let hostname = &headers.get::<Host>().unwrap().hostname;

            // "scihub.idk" -> "scihub": the search term is everything
            // before the first dot.
            let page_name = hostname.split(".").next().unwrap();

            let target_site = wikipedia::search_page_url(page_name).unwrap();

            res.set(StatusCode::PermanentRedirect)
                .set(Location(target_site.into()));

            ""
        }
    });

    // unwrap() so the application panics if the port can't be bound,
    // instead of failing silently.
    server.listen("127.0.0.1:80").unwrap();
}

src/wikipedia.rs
@@ -1,43 +0,0 @@
use std::error::Error;
use scraper::Selector;
use scraper::Html;

pub fn search_page_url(name: &str) -> Result<String, Box<dyn Error>> {
    // Ask the Wikipedia API for the canonical article URL for this title.
    let request_url = format!("https://en.wikipedia.org/w/api.php?action=query&prop=info&inprop=url&format=json&titles={page_name}",
                              page_name = name);

    let api_response =
        reqwest::blocking::get(&request_url)?
            .text()?;
    let api_response = json::parse(&api_response)?;

    let wiki_url: &str = api_response["query"]["pages"]
        .entries()
        .next().ok_or("failed to extract entry from api")?
        .1["fullurl"]
        .as_str().ok_or("failed to extract fullurl from api entry")?;

    let wikipedia_page_content = reqwest::blocking::get(wiki_url)?.text()?;

    // If no official URL can be extracted from the infobox,
    // fall back to the Wikipedia article itself.
    match extract_url(&wikipedia_page_content) {
        Err(_) => Ok(wiki_url.to_string()),
        Ok(dest) => Ok(dest),
    }
}

pub fn extract_url(wikipedia_page_content: &str) -> Result<String, String> {
    let select_urls = Selector::parse("table.infobox tbody tr").unwrap();
    let select_link = Selector::parse("a").unwrap();

    let document = Html::parse_document(wikipedia_page_content);

    // Find the first infobox row labeled "URL" or "Website"...
    let rows = document.select(&select_urls);
    let urls = rows.filter(|x| {
        let text = x.text().collect::<Vec<_>>();
        text.contains(&"URL") || text.contains(&"Website")
    }).next().ok_or("couldn't find url in infobox")?;

    // ...and return the href of the first link inside it.
    let link = urls.select(&select_link).next()
        .ok_or("couldn't find link within infobox url entry")?;
    Ok(link.value().attr("href").ok_or("could not find href")?.to_string())
}

(Go source file; filename not shown)
@@ -21,6 +21,7 @@ var afunc funcWait
 var wg sync.WaitGroup
 
+
 func generateNumbers(total int, ch chan<- *funcWait, af *funcWait) {
     log.Println("generateNumbers() START total =", total)
     ch <- af
     log.Println("generateNumbers() END total =", total)
@@ -33,7 +34,6 @@ func andlabsGoroutine(ch <-chan *funcWait, wg *sync.WaitGroup) {
     for f := range ch {
         log.Println("andlabsGoroutine() read f() from channel")
         f.f()
-        f.wgF.Done()
     }
     log.Printf("andlabsGoroutine() END")
 }
@@ -54,12 +54,8 @@ func main() {
     }
 
     for idx := 1; idx <= 10; idx++ {
-        log.Println("START waitgroup idx =", idx)
         afunc.val = idx * 20
-        afunc.wgF.Add(1)
         generateNumbers(idx, functionChan, &afunc)
-        afunc.wgF.Wait()
-        log.Println("END waitgroup idx =", idx)
     }
     wg.Done()
 