Great Rust & JavaScript example. Clean code!
parent b5d6b4541a · commit cee5e5fffd

@@ -0,0 +1,22 @@

# DNS over Wikipedia

Wikipedia keeps track of official URLs for popular websites. With DNS over Wikipedia installed, domains ending in `.idk` are redirected by searching Wikipedia and extracting the relevant URL from the article's infobox.

Example:

1. Type `scihub.idk/` in the browser address bar
2. Observe the redirect to `https://sci-hub.tw` (at the time of writing)
<img src="./demo.gif" width="600"/>
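
Under the hood, the lookup is just two HTTP requests. The sketch below reproduces it from a shell; it is illustrative only (the extension does this with `XMLHttpRequest`, the Rust script with `reqwest`) and assumes `curl` and `jq` are installed:

```bash
# 1. Ask the Wikipedia API where the article for "scihub" lives:
curl -s "https://en.wikipedia.org/w/api.php?action=query&prop=info&inprop=url&format=json&titles=scihub" \
  | jq -r '.query.pages[].fullurl'
# => the article URL, e.g. https://en.wikipedia.org/wiki/Sci-Hub

# 2. Fetch that article and take the first link in the infobox row
#    labeled "URL" or "Website" — that link is the redirect target.
```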
> Instead of googling for the site, I google for the site's Wikipedia article ("scihub wiki"), which usually has an up-to-date link to the site in the sidebar, whereas Google is forced to censor their results.
>
> If you Google "Piratebay", the first search result is a fake "thepirate-bay.org" (with a dash), but the Wikipedia article lists the right one.
>
> — [shpx](https://news.ycombinator.com/item?id=22414031)

## Installation Options

#### [Chrome Extension](https://chrome.google.com/webstore/detail/mjmjpfncapfopnommmngnmjalkopljji/)

#### [Firefox Extension](https://addons.mozilla.org/en-US/firefox/addon/dns-over-wikipedia/)

#### [(optional) Rust Redirect Script](./hosts-file)

@@ -0,0 +1,47 @@

var apiPrefix = "https://en.wikipedia.org/w/api.php?action=query&prop=info&inprop=url&format=json&origin=*&titles=";

// Synchronous GET: a blocking webRequest listener must return its
// result directly, so async fetch() is not an option here.
function request(url) {
  var x = new XMLHttpRequest();
  x.open('GET', url, false);
  x.send(null);
  return x.responseText;
}

// Map a search query to a site URL: look up the Wikipedia article, then
// take the first link in the infobox row labeled "URL" or "Website".
// Falls back to the article itself if no row matches.
function redirect(searchQuery) {
  // encode the query in case it contains spaces (e.g. omnibox input)
  var apiData = JSON.parse(request(apiPrefix + encodeURIComponent(searchQuery)));
  var wikiUrl = Object.values(apiData.query.pages)[0].fullurl;

  var html = request(wikiUrl);
  var doc = new DOMParser().parseFromString(html, "text/html");
  var infoboxRows = Array.from(doc.querySelectorAll('table.infobox tbody tr'));
  var wikiUrlRow = infoboxRows.filter(x => x.innerText.match(/(?:URL)|(?:Website)/));

  if (wikiUrlRow[0]) {
    return wikiUrlRow[0].querySelector('a').href;
  } else {
    return wikiUrl; // no infobox link; land on the article instead
  }
}

// Chrome exposes the extension API as `chrome`, Firefox as `browser`.
var root = null;
if (typeof chrome !== 'undefined') {
  root = chrome; // chrome
} else if (typeof browser !== 'undefined') {
  root = browser; // firefox
}

// Intercept navigations to any *.idk host and redirect them.
root.webRequest.onBeforeRequest.addListener(
  (requestDetails) => {
    var hostname = new URL(requestDetails.url).hostname;
    var searchQuery = hostname.replace(/\.idk$/, '');
    return {redirectUrl: redirect(searchQuery)};
  },
  {urls: ["*://*.idk/*"]},
  ["blocking"]
);

// Omnibox entry point: typing "idk <query>" in the address bar (Chrome only).
if (typeof chrome !== 'undefined') {
  chrome.omnibox.onInputEntered.addListener(function(text) {
    chrome.tabs.update({ url: redirect(text) });
  });
}

@@ -0,0 +1,16 @@

{
  "manifest_version": 2,
  "name": "DNS over Wikipedia",
  "version": "0.0.5",
  "description": "Resolve .idk domains using the URL provided by the Wikipedia page for a given topic",
  "homepage_url": "https://github.com/aaronjanse/dns-over-wikipedia",

  "background": {
    "scripts": ["common.js"]
  },
  "permissions": [
    "webRequest", "webRequestBlocking",
    "*://*.idk/*", "https://en.wikipedia.org/*"
  ],
  "omnibox": { "keyword": "idk" }
}

Binary file not shown (677 KiB).

@@ -0,0 +1,2 @@

/target

File diff suppressed because it is too large.

@@ -0,0 +1,13 @@

[package]
name = "dns-over-wikipedia"
version = "0.1.0"
authors = ["Aaron Janse <aaron@ajanse.me>", "Vihan Bhargava <vihan+github@vihan.org>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
nickel = "*"
reqwest = { version = "0.10", features = ["blocking"] }
json = "0.12.4"
scraper = "0.11.0"

@@ -0,0 +1,44 @@

Required dependencies:

- dnsmasq
  - Ubuntu: `sudo apt install dnsmasq`
  - macOS: `brew install dnsmasq`
- Rust (and Cargo)

### 1. Configure dnsmasq

#### Linux:

Add the following to your `dnsmasq.conf`:

```
address=/.idk/127.0.0.1
```

#### macOS:

```bash
echo "address=/.idk/127.0.0.1" >> "$(brew --prefix)/etc/dnsmasq.conf"
sudo brew services start dnsmasq
```
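
To sanity-check the dnsmasq rule before moving on, you can query the local server directly (a quick check, not part of the original steps; assumes `dig` is installed):

```bash
# dnsmasq should now answer 127.0.0.1 for any name under .idk
dig +short scihub.idk @127.0.0.1
# expected output: 127.0.0.1
```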

### 2. Use dnsmasq to resolve queries

#### macOS:

```bash
sudo mkdir -p /etc/resolver
sudo tee -a /etc/resolver/idk > /dev/null << EOF
nameserver 127.0.0.1
EOF
```
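
One way to confirm macOS picked up the resolver file (a sketch; note that `dig` bypasses `/etc/resolver`, so query through the system resolver instead):

```bash
# dscacheutil resolves via the system resolver, which consults
# /etc/resolver/idk for *.idk names (dig would skip that file).
dscacheutil -q host -a name scihub.idk
# expected to include: ip_address: 127.0.0.1
```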

### 3. Installing

```bash
cargo install --path .
```

### 4. Running

```bash
sudo dns-over-wikipedia
```
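
`sudo` is needed because the server binds `127.0.0.1:80` (see `src/main.rs`). With dnsmasq and the server both running, the full chain can be exercised from a shell (a sketch; assumes `curl`):

```bash
# The .idk name resolves to 127.0.0.1, where the Rust server replies
# with a 308 redirect to the URL extracted from Wikipedia's infobox.
curl -s -o /dev/null -D - http://scihub.idk/
# expected to include something like:
#   HTTP/1.1 308 Permanent Redirect
#   Location: https://sci-hub.tw
```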

@@ -0,0 +1,6 @@

# Dev shell for the Rust redirect script; enter with `nix-shell`, then
# e.g. `cargo build` (rustup provides the toolchain; openssl and
# pkgconfig are needed to build reqwest's TLS support).
with import <nixpkgs> {};

mkShell {
  name = "dns-over-wikipedia";
  buildInputs = [ rustup openssl pkgconfig ];
}

@@ -0,0 +1,30 @@

#[macro_use] extern crate nickel;

use nickel::Nickel;
use nickel::status::StatusCode;
use nickel::hyper::header::{Host, Location};

mod wikipedia;

fn main() {
    let mut server = Nickel::new();

    server.utilize(router! {
        get "**" => |req, mut res| {
            // The Host header carries the original .idk domain that
            // dnsmasq pointed at this server, e.g. "scihub.idk".
            let headers = &req.origin.headers;
            let hostname = &headers.get::<Host>().unwrap().hostname;

            // Everything before the first dot is the Wikipedia search term.
            let page_name = hostname.split(".").next().unwrap();

            let target_site = wikipedia::search_page_url(page_name).unwrap();

            // Permanent (308) redirect to the URL extracted from Wikipedia.
            res.set(StatusCode::PermanentRedirect)
                .set(Location(target_site.into()));

            ""
        }
    });

    // This way, if the port can't bind, the application panics
    // instead of silently erroring.
    server.listen("127.0.0.1:80").unwrap();
}

@@ -0,0 +1,43 @@

use std::error::Error;
use scraper::Selector;
use scraper::Html;

/// Resolve a page name to a site URL: ask the Wikipedia API where the
/// article lives, then try to extract the official link from its
/// infobox, falling back to the article URL itself.
pub fn search_page_url(name: &str) -> Result<String, Box<dyn Error>> {
    let request_url = format!(
        "https://en.wikipedia.org/w/api.php?action=query&prop=info&inprop=url&format=json&titles={page_name}",
        page_name = name);

    let api_response = reqwest::blocking::get(&request_url)?.text()?;
    let api_response = json::parse(&api_response)?;

    // "pages" is an object keyed by page ID; take the first entry's fullurl.
    let wiki_url: &str = api_response["query"]["pages"]
        .entries()
        .next().ok_or("failed to extract entry from api")?
        .1["fullurl"]
        .as_str().ok_or("failed to extract fullurl from api entry")?;

    let wikipedia_page_content = reqwest::blocking::get(wiki_url)?.text()?;

    match extract_url(&wikipedia_page_content) {
        Err(_) => Ok(wiki_url.to_string()),
        Ok(dest) => Ok(dest),
    }
}

/// Scan the infobox for a row labeled "URL" or "Website" and return
/// the href of the first link inside it.
pub fn extract_url(wikipedia_page_content: &str) -> Result<String, String> {
    let select_urls = Selector::parse("table.infobox tbody tr").unwrap();
    let select_link = Selector::parse("a").unwrap();

    let document = Html::parse_document(wikipedia_page_content);

    let rows = document.select(&select_urls);
    let urls = rows.filter(|x| {
        let text = x.text().collect::<Vec<_>>();
        text.contains(&"URL") || text.contains(&"Website")
    }).next().ok_or("couldn't find url in infobox")?;

    let link = urls.select(&select_link).next()
        .ok_or("couldn't find link within infobox url entry")?;
    Ok(link.value().attr("href").ok_or("could not find href")?.to_string())
}