@Override // Could also query for details with a Future: two queries are already made, so three in parallel would not be much slower — the only concern is the rate limit. In practice the three sequential queries are fast enough, so this stays sequential.
public CommandResult execute(TextChannel channel, User author, Message msg, String[] args) {
- channel.sendMessage("Unknown wiki name!").queue();
- return new CommandResult(CommandResultType.NORESULT);
- }
- }*/
- //i can use this instead of the above code since wikia redirects with 301 with no exceptions; when everything finishes moving, i might have to change it to fandom though
- System.out.println("https://" + wiki + ".wikia.com/api/v1/Search/List?query=" + search + "&limit=25&minArticleQuality=10&batch=1&namespaces=0%2C14");
- url = (HttpURLConnection) new URL("https://" + wiki + ".wikia.com/api/v1/Search/List?query=" + search + "&limit=25&minArticleQuality=10&batch=1&namespaces=0%2C14").openConnection();
- return new CommandResult(CommandResultType.FAILURE, "Unknown wiki name!");
- }
- } catch (IOException e) {
- if(url.getResponseCode() == 404) {
- return new CommandResult(CommandResultType.NORESULT, "it in the " + init[0] + " wiki");
- }
- return new CommandResult(CommandResultType.ERROR, ExceptionUtils.getStackTrace(e));
+ //The wikia.com domain is still in use, so there is no need to switch to fandom.com for now.
+ //Alternative search endpoint (autocomplete-only, but much faster): "https://" + wiki + ".wikia.com/wikia.php?controller=LinkSuggest&method=getLinkSuggestions&format=json&query=" + search
+ url = (HttpURLConnection) new URL("https://" + wiki + ".wikia.com/api.php?action=query&format=json&list=search&srsearch=" + search).openConnection();
+ if(url.getResponseCode() == 404) {
+ return new CommandResult(CommandResultType.FAILURE, "Unknown wiki name!"); //404 means unknown wiki now
}
- JSONObject item;
+
+ //get result
+ int id;
try {
- result = new JSONTokener(stream);
- JSONArray main = new JSONObject(result).getJSONArray("items");
- item = main.getJSONObject(num);
+ JSONObject result = new JSONObject(new JSONTokener(url.getInputStream()));
+ id = result.getJSONObject("query").getJSONArray("search").getJSONObject(num).getInt("pageid");
} catch(JSONException e) {
return new CommandResult(CommandResultType.NORESULT, "it in the " + init[0] + " wiki");
}
- // System.out.println(i);
- //JSONArray array = new JSONObject(new JSONTokener(new URL("https://" + wiki + (old ? ".wikia" : ".fandom") + ".com/api/v1/Articles/AsSimpleJson?id=" + item.getInt("id")).openStream())).getJSONArray("sections").getJSONObject(0).getJSONArray("content");
- JSONArray array = new JSONObject(new JSONTokener(new URL("https://" + wiki + ".wikia.com/api/v1/Articles/AsSimpleJson?id=" + item.getInt("id")).openStream())).getJSONArray("sections").getJSONObject(0).getJSONArray("content");
+ //Keep the abstract only up to the last full stop before the table of contents (or before the end) for slightly better formatting.
+ //Table-of-contents detection (checking for a "1 " right after a full stop) can yield false positives, but occasionally losing a little detail is preferable to frequently producing ugly output.
+ String desc = info.getString("abstract").replaceAll("^(?:(.*?\\.) ?1 .*|(.*\\.) .*?)$", "$1$2"); //greedy if table of content is present, else lazy to get the last
+ eb.setDescription(desc.matches(".*\\.$") ? desc : (desc + "...")); //if everything fails (aka last char aint a full stop) give it the good ol ... treatment