Added thefappening.sexy ripper #1514

Open · wants to merge 1 commit into base: main
ThefappeningsexyRipper.java
@@ -0,0 +1,84 @@
package com.rarchives.ripme.ripper.rippers;

import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.ripper.AbstractRipper;
import com.rarchives.ripme.utils.Http;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

public class ThefappeningsexyRipper extends AbstractHTMLRipper {

    private static final String DOMAIN = "thefappening.sexy";
    private static final String HOST = "thefappening";

    public ThefappeningsexyRipper(URL url) throws IOException {
        super(url);
    }

    @Override
    public String getHost() {
        return HOST;
    }

    @Override
    public String getDomain() {
        return DOMAIN;
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        try {
            // Use the album page's title, minus the site's title suffix, as the gallery ID.
            Document doc = Jsoup.connect(url.toString())
                    .userAgent(AbstractRipper.USER_AGENT)
                    .get();
            String title = doc.title();
            return title.replace(" | The Fappening: Back At It Again!", "");
        } catch (Exception e) {
            // Fall back to a placeholder GID if the page can't be fetched or parsed.
            e.printStackTrace();
            return "missed";
        }
    }

    @Override
    public Document getFirstPage() throws IOException {
        return Http.url(url).get();
    }

    @Override
    public Document getNextPage(Document doc) throws IOException {
        Elements nextPageLink = doc.select("div.navigationBar a[rel=next]");
        if (nextPageLink.isEmpty()) {
            throw new IOException("No more pages");
        } else {
            // Resolve the relative "next" link against the current page URL.
            URL nextURL = new URL(this.url, nextPageLink.first().attr("href"));
            return Http.url(nextURL).get();
        }
    }

    @Override
    public List<String> getURLsFromPage(Document doc) {
        List<String> result = new ArrayList<>();
        for (Element el : doc.select("img.thumbnail")) {
            // Turn each thumbnail src into its full-size counterpart by dropping
            // the "_data/i/" prefix and the "-th" thumbnail marker.
            String src = el.attr("src");
            String correctedURL = src.replace("_data/i/", "").replace("-th", "");
            result.add("https://thefappening.sexy/albums/" + correctedURL);
        }
        return result;
    }

    @Override
    public void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
    }
}
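
As a reading aid for the rewrite in getURLsFromPage: it is just two string replacements plus a fixed prefix. The snippet below is a minimal, self-contained sketch of that transformation; the example thumbnail src is a hypothetical path, not a value taken from the site.

// Minimal sketch of the thumbnail-to-full-size rewrite used in getURLsFromPage().
// The src value below is a hypothetical example; real values come from img.thumbnail elements.
public class ThumbnailRewriteSketch {
    public static void main(String[] args) {
        String src = "_data/i/upload/2019/01/01/example-photo-th.jpg";
        String correctedURL = src.replace("_data/i/", "").replace("-th", "");
        // Prints: https://thefappening.sexy/albums/upload/2019/01/01/example-photo.jpg
        System.out.println("https://thefappening.sexy/albums/" + correctedURL);
    }
}

Note that replace("-th", "") is applied to the whole path, so it would also strip a literal "-th" occurring elsewhere in a filename; that matches what the ripper in this PR does.
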
ThefappeningsexyRipperTest.java
@@ -0,0 +1,24 @@
package com.rarchives.ripme.tst.ripper.rippers;

import com.rarchives.ripme.ripper.rippers.ThefappeningsexyRipper;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.net.URL;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class ThefappeningsexyRipperTest extends RippersTest {

    @Test
    public void testThefappeningsexyRip() throws IOException {
        ThefappeningsexyRipper ripper = new ThefappeningsexyRipper(new URL("https://thefappening.sexy/albums/index.php?/category/1"));
        testRipper(ripper);
    }

    @Test
    public void testGetGID() throws IOException {
        URL url = new URL("https://thefappening.sexy/albums/index.php?/category/1");
        ThefappeningsexyRipper ripper = new ThefappeningsexyRipper(url);
        assertEquals("Ali Michael", ripper.getGID(url));
    }

}