-
-
Notifications
You must be signed in to change notification settings - Fork 6
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Add support for documentation sites powered by mkdocs (#9)
* feat: add support for mkdocs * feat: add support for mkdocs * feat: refactor search providers * chore: remove unused constant * feat(docs): add more sites powered by mkdocs
- Loading branch information
Showing
19 changed files
with
391 additions
and
80 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
from ulauncher.config import CACHE_DIR | ||
import requests | ||
import logging | ||
import json | ||
import os | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
CACHE_FOLDER_PATH = os.path.join(CACHE_DIR, 'ulauncher-docsearch', | ||
'mkdocs-indexes') | ||
|
||
|
||
class MkDocsIndexError(Exception):
    """Raised when a mkdocs search index cannot be downloaded or parsed."""
|
||
|
||
class MkDocsIndexer(object):
    """Downloads a mkdocs site's ``search_index.json`` and stores a
    slimmed-down copy of it in the extension cache folder, ready for
    offline searching."""

    # Seconds to wait for the remote index. The original request had no
    # timeout, which could hang the extension forever on a dead site.
    REQUEST_TIMEOUT = 30

    def index(self, docset_key, docset):
        """Fetch and cache the search index for a docset.

        Parameters:
            docset_key (str): identifier of the docset; used as the cache
                file name.
            docset (dict): docset configuration; must contain
                "search_index_url".

        Raises:
            MkDocsIndexError: if the index cannot be downloaded.
        """
        url = docset["search_index_url"]

        r = requests.get(url, timeout=self.REQUEST_TIMEOUT)
        if r.status_code != 200:
            raise MkDocsIndexError(
                "Error downloading mkdocs index for %s. HTTP error: %s" %
                (docset_key, r.status_code))

        data = r.json()

        # A well-formed mkdocs search_index.json has a top-level "docs"
        # list; silently skip indexes that do not (best effort, as before).
        if "docs" not in data:
            return

        items = [self.map_item(doc) for doc in data["docs"]]

        index_file = self.get_index_file_path(docset_key)
        with open(index_file, 'w', encoding='utf-8') as f:
            json.dump(items, f)

    def get_index_file_path(self, docset_key):
        """Return the cache file path for *docset_key*, creating the cache
        folder on first use."""
        index_filename = "%s.json" % docset_key

        # exist_ok=True avoids the check-then-create race the original
        # os.path.exists()/makedirs() pair had.
        os.makedirs(CACHE_FOLDER_PATH, exist_ok=True)

        return os.path.join(CACHE_FOLDER_PATH, index_filename)

    def map_item(self, data):
        """Map one raw mkdocs index entry onto the slim record stored on
        disk (title, page location, trimmed body text)."""
        return {
            'title': data["title"],
            'description': data["location"],
            'text': self.trim_string(str(data["text"]), 60)
        }

    def trim_string(self, s: str, limit: int, ellipsis='…') -> str:
        """Strip *s* and shorten it to at most *limit* characters,
        appending *ellipsis* when truncation occurred."""
        s = s.strip()
        if len(s) > limit:
            # limit - 1 leaves room for the single-character ellipsis.
            return s[:limit - 1].strip() + ellipsis

        return s
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,77 @@ | ||
import logging | ||
from typing import List | ||
from .base import BaseProvider | ||
from .constants import PROVIDER_ALGOLIA_DOCSEARCH | ||
from .base import SearchException | ||
from docsearch.mapper import DefaultMapper, VercelMapper, PrismaMapper, TerraformMapper, WebDevMapper | ||
from algoliasearch.search_client import SearchClient | ||
from algoliasearch.exceptions import AlgoliaException | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
class AlgoliaProvider(BaseProvider):
    """Search provider backed by Algolia DocSearch indexes."""

    def __init__(self):
        # Docset-specific result mappers; anything not listed here falls
        # back to DefaultMapper (see get_results_mapper).
        self.result_mappers: List = [
            VercelMapper(),
            TerraformMapper(),
            PrismaMapper(),
            WebDevMapper()
        ]

    def get_name(self):
        """Return the provider identifier used in docset configuration."""
        return PROVIDER_ALGOLIA_DOCSEARCH

    def search(self, docset_key, docset, term):
        """Query the docset's Algolia index for *term*.

        Parameters:
            docset_key (str): identifier of the docset.
            docset (dict): docset configuration with the Algolia
                application id, API key and index name.
            term (str): the user's search term.

        Returns:
            list: mapped result items (empty when there are no hits).

        Raises:
            SearchException: when the Algolia query fails.
        """
        algolia_client = SearchClient.create(docset['algolia_application_id'],
                                             docset['algolia_api_key'])

        index = algolia_client.init_index(docset['algolia_index'])

        try:
            search_results = index.search(term,
                                          self.build_request_options(docset))

            if not search_results['hits']:
                return []

            return self.map_results(docset_key, docset, search_results["hits"])
        except AlgoliaException as e:
            logger.error("Error fetching documentation from algolia: %s", e)
            # Fix: the original passed the format string and the error as
            # two separate Exception args, so "%s" was never interpolated.
            raise SearchException(
                "Error fetching documentation from algolia: %s" % e) from e

    def build_request_options(self, docset):
        """
        Allow to specify custom search options for a specific docset.

        Parameters:
            docset (dict): The docset configuration; an optional
                "facet_filters" entry becomes Algolia's "facetFilters".
        """
        opts = {}

        if "facet_filters" in docset:
            opts = {"facetFilters": docset["facet_filters"]}

        return opts

    def get_results_mapper(self, docset_key):
        """
        Returns the mapper object that will map the specified docset data
        into the format required by the extension.
        """
        for mapper in self.result_mappers:
            if mapper.get_type() == docset_key:
                return mapper

        return DefaultMapper()

    def map_results(self, docset_key, docset_data, results):
        """Map the raw Algolia hits via the docset's mapper."""
        mapper = self.get_results_mapper(docset_key)
        items = []
        for hit in results:
            items.append(mapper.map(docset_data, hit))

        return items
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
class BaseProvider(object):
    """Abstract interface that every documentation search provider
    implements."""

    def get_name(self):
        """Return the unique identifier of this provider."""
        raise NotImplementedError()

    def search(self, docset_key, docset, term):
        """Search *docset* for *term* and return a list of result items."""
        raise NotImplementedError()
|
||
|
||
class SearchException(Exception):
    """Raised when a provider fails to fetch search results."""
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,2 @@ | ||
# Identifiers for the supported documentation-search backends; docset
# configs reference these names and ProviderFactory resolves them.
PROVIDER_ALGOLIA_DOCSEARCH = "algolia"  # sites indexed by Algolia DocSearch
PROVIDER_MKDOCS = "mkdocs"  # sites exposing a mkdocs search_index.json
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
from typing import List | ||
from .base import BaseProvider | ||
from .algolia import AlgoliaProvider | ||
from .mkdocs import MkDocsProvider | ||
|
||
|
||
class ProviderFactory(object):
    """Registry that resolves a search provider implementation by name."""

    def __init__(self):
        # Every provider known to the extension, in lookup order.
        self.providers: List[BaseProvider] = [
            MkDocsProvider(), AlgoliaProvider()
        ]

    def get(self, name: str) -> BaseProvider:
        """Return the provider whose get_name() equals *name*.

        Raises:
            RuntimeError: when no provider is registered under *name*.
        """
        match = next(
            (candidate for candidate in self.providers
             if candidate.get_name() == name), None)
        if match is None:
            raise RuntimeError("Provider with name '%s' was not found" % name)
        return match
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
import functools | ||
import os | ||
import json | ||
from .base import BaseProvider | ||
from .constants import PROVIDER_MKDOCS | ||
|
||
from docsearch.indexers.mkdocs import CACHE_FOLDER_PATH | ||
|
||
|
||
class MkDocsProvider(BaseProvider):
    """Search provider for sites powered by mkdocs, matching queries
    against the locally cached index produced by the mkdocs indexer."""

    def get_name(self):
        """Return the provider identifier used in docset configuration."""
        return PROVIDER_MKDOCS

    def search(self, docset_key, docset, query):
        """Case-insensitively match *query* against cached index titles.

        Parameters:
            docset_key (str): identifier of the docset (cache file name).
            docset (dict): docset configuration with "url" and "icon".
            query (str): the user's search term.

        Returns:
            list: result dicts with url/title/icon/category keys.
        """
        data = self.read_mkdocs_index_file(docset_key)

        # Hoist the lowercasing of the query out of the loop.
        needle = query.lower()
        results = []
        for item in data:
            if needle in item["title"].lower():
                results.append({
                    'url': "{}/{}".format(docset["url"], item["description"]),
                    'title': item["title"],
                    'icon': docset['icon'],
                    'category': item['description'],
                })

        return results

    def read_mkdocs_index_file(self, docset_key):
        """Load (and memoize per instance) the cached index for *docset_key*.

        Fix: the original decorated this instance method with
        functools.lru_cache, which keys the cache on ``self`` and keeps
        every provider instance alive for the process lifetime
        (flake8-bugbear B019). A plain per-instance dict provides the same
        memoization without the leak.
        """
        cache = getattr(self, '_index_cache', None)
        if cache is None:
            cache = self._index_cache = {}
        if docset_key not in cache:
            file_path = os.path.join(CACHE_FOLDER_PATH, "%s.json" % docset_key)
            with open(file_path, encoding='utf-8') as f:
                cache[docset_key] = json.load(f)
        return cache[docset_key]
Oops, something went wrong.