Mirror of https://github.com/mihonapp/mihon.git (synced 2024-11-21 20:47:03 -05:00)
Refactor archive support with libarchive (#949)
* Refactor archive support with libarchive
* Revert string resource changes
* Only mark archive formats as supported (comic book archives should not be compressed)
* Fixup
* Remove epub from archive format list
* Move to mihon package
* Format
* Cleanup
Parent: 36e40c0997
Commit: 239c38982c
22 changed files with 239 additions and 233 deletions
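Before the per-file hunks, here is a minimal usage sketch of the libarchive-backed reader this commit introduces; it only exercises calls that appear in the diff (archiveReader, useEntries, getInputStream). The function name readEntry and the chapterFile/entryName parameters are illustrative placeholders, not part of the commit.

import android.content.Context
import com.hippo.unifile.UniFile
import mihon.core.common.archive.archiveReader

// Sketch only: open an archive, check that a named entry exists, then stream its bytes.
// archiveReader(context) memory-maps the file; useEntries exposes a Sequence<ArchiveEntry> (name, isFile);
// getInputStream(name) returns null when the entry is missing.
fun readEntry(context: Context, chapterFile: UniFile, entryName: String): ByteArray? =
    chapterFile.archiveReader(context).use { reader ->
        val found = reader.useEntries { entries -> entries.any { it.isFile && it.name == entryName } }
        if (found) reader.getInputStream(entryName)?.use { it.readBytes() } else null
    }

The write side is analogous: Downloader now builds the .cbz with ZipWriter, which configures libarchive's zip writer with store compression, matching the note above that comic book archives should not be compressed.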
@@ -203,7 +203,6 @@ dependencies {
     // Disk
     implementation(libs.disklrucache)
     implementation(libs.unifile)
-    implementation(libs.bundles.archive)

     // Preferences
     implementation(libs.preferencektx)
app/proguard-rules.pro vendored (3 changes)

@@ -77,9 +77,6 @@
 # XmlUtil
 -keep public enum nl.adaptivity.xmlutil.EventType { *; }

-# Apache Commons Compress
--keep class * extends org.apache.commons.compress.archivers.zip.ZipExtraField { <init>(); }
-
 # Firebase
 -keep class com.google.firebase.installations.** { *; }
 -keep interface com.google.firebase.installations.** { *; }
@@ -38,6 +38,7 @@ import kotlinx.coroutines.flow.update
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.supervisorScope
 import logcat.LogPriority
+import mihon.core.common.archive.ZipWriter
 import nl.adaptivity.xmlutil.serialization.XML
 import okhttp3.Response
 import tachiyomi.core.common.i18n.stringResource
@@ -58,12 +59,8 @@ import tachiyomi.domain.track.interactor.GetTracks
 import tachiyomi.i18n.MR
 import uy.kohesive.injekt.Injekt
 import uy.kohesive.injekt.api.get
-import java.io.BufferedOutputStream
 import java.io.File
 import java.util.Locale
-import java.util.zip.CRC32
-import java.util.zip.ZipEntry
-import java.util.zip.ZipOutputStream

 /**
  * This class is the one in charge of downloading chapters.
@@ -594,25 +591,9 @@ class Downloader(
         tmpDir: UniFile,
     ) {
         val zip = mangaDir.createFile("$dirname.cbz$TMP_DIR_SUFFIX")!!
-        ZipOutputStream(BufferedOutputStream(zip.openOutputStream())).use { zipOut ->
-            zipOut.setMethod(ZipEntry.STORED)
-            tmpDir.listFiles()?.forEach { img ->
-                img.openInputStream().use { input ->
-                    val data = input.readBytes()
-                    val size = img.length()
-                    val entry = ZipEntry(img.name).apply {
-                        val crc = CRC32().apply {
-                            update(data)
-                        }
-                        setCrc(crc.value)
-
-                        compressedSize = size
-                        setSize(size)
-                    }
-                    zipOut.putNextEntry(entry)
-                    zipOut.write(data)
-                }
+        ZipWriter(context, zip).use { writer ->
+            tmpDir.listFiles()?.forEach { file ->
+                writer.write(file)
             }
         }
         zip.renameTo("$dirname.cbz")
@@ -3,26 +3,22 @@ package eu.kanade.tachiyomi.ui.reader.loader
 import eu.kanade.tachiyomi.source.model.Page
 import eu.kanade.tachiyomi.ui.reader.model.ReaderPage
 import eu.kanade.tachiyomi.util.lang.compareToCaseInsensitiveNaturalOrder
-import mihon.core.common.extensions.toZipFile
+import mihon.core.common.archive.ArchiveReader
 import tachiyomi.core.common.util.system.ImageUtil
-import java.nio.channels.SeekableByteChannel

 /**
- * Loader used to load a chapter from a .zip or .cbz file.
+ * Loader used to load a chapter from an archive file.
  */
-internal class ZipPageLoader(channel: SeekableByteChannel) : PageLoader() {
+internal class ArchivePageLoader(private val reader: ArchiveReader) : PageLoader() {

-    private val zip = channel.toZipFile()
-
     override var isLocal: Boolean = true

-    override suspend fun getPages(): List<ReaderPage> {
-        return zip.entries.asSequence()
-            .filter { !it.isDirectory && ImageUtil.isImage(it.name) { zip.getInputStream(it) } }
+    override suspend fun getPages(): List<ReaderPage> = reader.useEntries { entries ->
+        entries
+            .filter { it.isFile && ImageUtil.isImage(it.name) { reader.getInputStream(it.name)!! } }
             .sortedWith { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }
             .mapIndexed { i, entry ->
                 ReaderPage(i).apply {
-                    stream = { zip.getInputStream(entry) }
+                    stream = { reader.getInputStream(entry.name)!! }
                     status = Page.State.READY
                 }
             }
@@ -35,6 +31,6 @@ internal class ZipPageLoader(channel: SeekableByteChannel) : PageLoader() {

     override fun recycle() {
         super.recycle()
-        zip.close()
+        reader.close()
     }
 }
@@ -1,14 +1,13 @@
 package eu.kanade.tachiyomi.ui.reader.loader

 import android.content.Context
-import com.github.junrar.exception.UnsupportedRarV5Exception
 import eu.kanade.tachiyomi.data.download.DownloadManager
 import eu.kanade.tachiyomi.data.download.DownloadProvider
 import eu.kanade.tachiyomi.source.Source
 import eu.kanade.tachiyomi.source.online.HttpSource
 import eu.kanade.tachiyomi.ui.reader.model.ReaderChapter
+import mihon.core.common.archive.archiveReader
 import tachiyomi.core.common.i18n.stringResource
-import tachiyomi.core.common.storage.openReadOnlyChannel
 import tachiyomi.core.common.util.lang.withIOContext
 import tachiyomi.core.common.util.system.logcat
 import tachiyomi.domain.manga.model.Manga
@@ -95,13 +94,8 @@ class ChapterLoader(
             source is LocalSource -> source.getFormat(chapter.chapter).let { format ->
                 when (format) {
                     is Format.Directory -> DirectoryPageLoader(format.file)
-                    is Format.Zip -> ZipPageLoader(format.file.openReadOnlyChannel(context))
-                    is Format.Rar -> try {
-                        RarPageLoader(format.file.openInputStream())
-                    } catch (e: UnsupportedRarV5Exception) {
-                        error(context.stringResource(MR.strings.loader_rar5_error))
-                    }
-                    is Format.Epub -> EpubPageLoader(format.file.openReadOnlyChannel(context))
+                    is Format.Archive -> ArchivePageLoader(format.file.archiveReader(context))
+                    is Format.Epub -> EpubPageLoader(format.file.archiveReader(context))
                 }
             }
             source is HttpSource -> HttpPageLoader(chapter, source)
@@ -10,7 +10,7 @@ import eu.kanade.tachiyomi.source.Source
 import eu.kanade.tachiyomi.source.model.Page
 import eu.kanade.tachiyomi.ui.reader.model.ReaderChapter
 import eu.kanade.tachiyomi.ui.reader.model.ReaderPage
-import tachiyomi.core.common.storage.openReadOnlyChannel
+import mihon.core.common.archive.archiveReader
 import tachiyomi.domain.manga.model.Manga
 import uy.kohesive.injekt.injectLazy

@@ -27,7 +27,7 @@ internal class DownloadPageLoader(

     private val context: Application by injectLazy()

-    private var zipPageLoader: ZipPageLoader? = null
+    private var archivePageLoader: ArchivePageLoader? = null

     override var isLocal: Boolean = true

@@ -43,11 +43,11 @@ internal class DownloadPageLoader(

     override fun recycle() {
         super.recycle()
-        zipPageLoader?.recycle()
+        archivePageLoader?.recycle()
     }

     private suspend fun getPagesFromArchive(file: UniFile): List<ReaderPage> {
-        val loader = ZipPageLoader(file.openReadOnlyChannel(context)).also { zipPageLoader = it }
+        val loader = ArchivePageLoader(file.archiveReader(context)).also { archivePageLoader = it }
         return loader.getPages()
     }

@@ -63,6 +63,6 @@ internal class DownloadPageLoader(
     }

     override suspend fun loadPage(page: ReaderPage) {
-        zipPageLoader?.loadPage(page)
+        archivePageLoader?.loadPage(page)
     }
 }
@@ -3,21 +3,21 @@ package eu.kanade.tachiyomi.ui.reader.loader
 import eu.kanade.tachiyomi.source.model.Page
 import eu.kanade.tachiyomi.ui.reader.model.ReaderPage
 import eu.kanade.tachiyomi.util.storage.EpubFile
-import java.nio.channels.SeekableByteChannel
+import mihon.core.common.archive.ArchiveReader

 /**
  * Loader used to load a chapter from a .epub file.
  */
-internal class EpubPageLoader(channel: SeekableByteChannel) : PageLoader() {
+internal class EpubPageLoader(reader: ArchiveReader) : PageLoader() {

-    private val epub = EpubFile(channel)
+    private val epub = EpubFile(reader)

     override var isLocal: Boolean = true

     override suspend fun getPages(): List<ReaderPage> {
         return epub.getImagesFromPages()
             .mapIndexed { i, path ->
-                val streamFn = { epub.getInputStream(epub.getEntry(path)!!) }
+                val streamFn = { epub.getInputStream(path)!! }
                 ReaderPage(i).apply {
                     stream = streamFn
                     status = Page.State.READY
@@ -1,67 +0,0 @@
-package eu.kanade.tachiyomi.ui.reader.loader
-
-import com.github.junrar.Archive
-import com.github.junrar.rarfile.FileHeader
-import eu.kanade.tachiyomi.source.model.Page
-import eu.kanade.tachiyomi.ui.reader.model.ReaderPage
-import eu.kanade.tachiyomi.util.lang.compareToCaseInsensitiveNaturalOrder
-import tachiyomi.core.common.util.system.ImageUtil
-import java.io.InputStream
-import java.io.PipedInputStream
-import java.io.PipedOutputStream
-import java.util.concurrent.Executors
-
-/**
- * Loader used to load a chapter from a .rar or .cbr file.
- */
-internal class RarPageLoader(inputStream: InputStream) : PageLoader() {
-
-    private val rar = Archive(inputStream)
-
-    override var isLocal: Boolean = true
-
-    /**
-     * Pool for copying compressed files to an input stream.
-     */
-    private val pool = Executors.newFixedThreadPool(1)
-
-    override suspend fun getPages(): List<ReaderPage> {
-        return rar.fileHeaders.asSequence()
-            .filter { !it.isDirectory && ImageUtil.isImage(it.fileName) { rar.getInputStream(it) } }
-            .sortedWith { f1, f2 -> f1.fileName.compareToCaseInsensitiveNaturalOrder(f2.fileName) }
-            .mapIndexed { i, header ->
-                ReaderPage(i).apply {
-                    stream = { getStream(header) }
-                    status = Page.State.READY
-                }
-            }
-            .toList()
-    }
-
-    override suspend fun loadPage(page: ReaderPage) {
-        check(!isRecycled)
-    }
-
-    override fun recycle() {
-        super.recycle()
-        rar.close()
-        pool.shutdown()
-    }
-
-    /**
-     * Returns an input stream for the given [header].
-     */
-    private fun getStream(header: FileHeader): InputStream {
-        val pipeIn = PipedInputStream()
-        val pipeOut = PipedOutputStream(pipeIn)
-        pool.execute {
-            try {
-                pipeOut.use {
-                    rar.extractFile(header, it)
-                }
-            } catch (e: Exception) {
-            }
-        }
-        return pipeIn
-    }
-}
@@ -32,7 +32,7 @@ dependencies {
     implementation(libs.image.decoder)

     implementation(libs.unifile)
-    implementation(libs.bundles.archive)
+    implementation(libs.libarchive)

     api(kotlinx.coroutines.core)
     api(kotlinx.serialization.json)
@@ -1,48 +1,27 @@
 package eu.kanade.tachiyomi.util.storage

-import mihon.core.common.extensions.toZipFile
-import org.apache.commons.compress.archivers.zip.ZipArchiveEntry
+import mihon.core.common.archive.ArchiveReader
 import org.jsoup.Jsoup
 import org.jsoup.nodes.Document
 import java.io.Closeable
 import java.io.File
 import java.io.InputStream
-import java.nio.channels.SeekableByteChannel

 /**
  * Wrapper over ZipFile to load files in epub format.
  */
-class EpubFile(channel: SeekableByteChannel) : Closeable {
+class EpubFile(private val reader: ArchiveReader) : Closeable by reader {

-    /**
-     * Zip file of this epub.
-     */
-    private val zip = channel.toZipFile()
-
     /**
      * Path separator used by this epub.
      */
     private val pathSeparator = getPathSeparator()

-    /**
-     * Closes the underlying zip file.
-     */
-    override fun close() {
-        zip.close()
-    }
-
     /**
      * Returns an input stream for reading the contents of the specified zip file entry.
      */
-    fun getInputStream(entry: ZipArchiveEntry): InputStream {
-        return zip.getInputStream(entry)
-    }
-
-    /**
-     * Returns the zip file entry for the specified name, or null if not found.
-     */
-    fun getEntry(name: String): ZipArchiveEntry? {
-        return zip.getEntry(name)
+    fun getInputStream(entryName: String): InputStream? {
+        return reader.getInputStream(entryName)
     }

     /**
@@ -59,9 +38,9 @@ class EpubFile(channel: SeekableByteChannel) : Closeable {
      * Returns the path to the package document.
      */
     fun getPackageHref(): String {
-        val meta = zip.getEntry(resolveZipPath("META-INF", "container.xml"))
+        val meta = getInputStream(resolveZipPath("META-INF", "container.xml"))
         if (meta != null) {
-            val metaDoc = zip.getInputStream(meta).use { Jsoup.parse(it, null, "") }
+            val metaDoc = meta.use { Jsoup.parse(it, null, "") }
             val path = metaDoc.getElementsByTag("rootfile").first()?.attr("full-path")
             if (path != null) {
                 return path
@@ -74,8 +53,7 @@ class EpubFile(channel: SeekableByteChannel) : Closeable {
      * Returns the package document where all the files are listed.
      */
     fun getPackageDocument(ref: String): Document {
-        val entry = zip.getEntry(ref)
-        return zip.getInputStream(entry).use { Jsoup.parse(it, null, "") }
+        return getInputStream(ref)!!.use { Jsoup.parse(it, null, "") }
     }

     /**
@@ -98,8 +76,7 @@ class EpubFile(channel: SeekableByteChannel) : Closeable {
         val basePath = getParentDirectory(packageHref)
         pages.forEach { page ->
             val entryPath = resolveZipPath(basePath, page)
-            val entry = zip.getEntry(entryPath)
-            val document = zip.getInputStream(entry).use { Jsoup.parse(it, null, "") }
+            val document = getInputStream(entryPath)!!.use { Jsoup.parse(it, null, "") }
             val imageBasePath = getParentDirectory(entryPath)

             document.allElements.forEach {
@@ -117,8 +94,9 @@ class EpubFile(channel: SeekableByteChannel) : Closeable {
      * Returns the path separator used by the epub file.
      */
     private fun getPathSeparator(): String {
-        val meta = zip.getEntry("META-INF\\container.xml")
+        val meta = getInputStream("META-INF\\container.xml")
         return if (meta != null) {
+            meta.close()
             "\\"
         } else {
             "/"
@@ -0,0 +1,6 @@
+package mihon.core.common.archive
+
+class ArchiveEntry(
+    val name: String,
+    val isFile: Boolean,
+)
@@ -0,0 +1,52 @@
+package mihon.core.common.archive
+
+import me.zhanghai.android.libarchive.Archive
+import me.zhanghai.android.libarchive.ArchiveEntry
+import me.zhanghai.android.libarchive.ArchiveException
+import java.io.InputStream
+import java.nio.ByteBuffer
+
+class ArchiveInputStream(buffer: Long, size: Long) : InputStream() {
+    private val archive = Archive.readNew()
+
+    init {
+        try {
+            Archive.setCharset(archive, Charsets.UTF_8.name().toByteArray())
+            Archive.readSupportFilterAll(archive)
+            Archive.readSupportFormatAll(archive)
+            Archive.readOpenMemoryUnsafe(archive, buffer, size)
+        } catch (e: ArchiveException) {
+            close()
+            throw e
+        }
+    }
+
+    private val oneByteBuffer = ByteBuffer.allocateDirect(1)
+
+    override fun read(): Int {
+        read(oneByteBuffer)
+        return if (oneByteBuffer.hasRemaining()) oneByteBuffer.get().toUByte().toInt() else -1
+    }
+
+    override fun read(b: ByteArray, off: Int, len: Int): Int {
+        val buffer = ByteBuffer.wrap(b, off, len)
+        read(buffer)
+        return if (buffer.hasRemaining()) buffer.remaining() else -1
+    }
+
+    private fun read(buffer: ByteBuffer) {
+        buffer.clear()
+        Archive.readData(archive, buffer)
+        buffer.flip()
+    }
+
+    override fun close() {
+        Archive.readFree(archive)
+    }
+
+    fun getNextEntry() = Archive.readNextHeader(archive).takeUnless { it == 0L }?.let { entry ->
+        val name = ArchiveEntry.pathnameUtf8(entry) ?: ArchiveEntry.pathname(entry)?.decodeToString() ?: return null
+        val isFile = ArchiveEntry.filetype(entry) == ArchiveEntry.AE_IFREG
+        ArchiveEntry(name, isFile)
+    }
+}
@@ -0,0 +1,42 @@
+package mihon.core.common.archive
+
+import android.content.Context
+import android.os.ParcelFileDescriptor
+import android.system.Os
+import android.system.OsConstants
+import com.hippo.unifile.UniFile
+import me.zhanghai.android.libarchive.ArchiveException
+import tachiyomi.core.common.storage.openFileDescriptor
+import java.io.Closeable
+import java.io.InputStream
+
+class ArchiveReader(pfd: ParcelFileDescriptor) : Closeable {
+    val size = pfd.statSize
+    val address = Os.mmap(0, size, OsConstants.PROT_READ, OsConstants.MAP_PRIVATE, pfd.fileDescriptor, 0)
+
+    inline fun <T> useEntries(block: (Sequence<ArchiveEntry>) -> T): T =
+        ArchiveInputStream(address, size).use { block(generateSequence { it.getNextEntry() }) }
+
+    fun getInputStream(entryName: String): InputStream? {
+        val archive = ArchiveInputStream(address, size)
+        try {
+            while (true) {
+                val entry = archive.getNextEntry() ?: break
+                if (entry.name == entryName) {
+                    return archive
+                }
+            }
+        } catch (e: ArchiveException) {
+            archive.close()
+            throw e
+        }
+        archive.close()
+        return null
+    }
+
+    override fun close() {
+        Os.munmap(address, size)
+    }
+}
+
+fun UniFile.archiveReader(context: Context) = openFileDescriptor(context, "r").use { ArchiveReader(it) }
@@ -0,0 +1,74 @@
+package mihon.core.common.archive
+
+import android.content.Context
+import android.system.Os
+import android.system.StructStat
+import com.hippo.unifile.UniFile
+import me.zhanghai.android.libarchive.Archive
+import me.zhanghai.android.libarchive.ArchiveEntry
+import me.zhanghai.android.libarchive.ArchiveException
+import tachiyomi.core.common.storage.openFileDescriptor
+import java.io.Closeable
+import java.nio.ByteBuffer
+
+class ZipWriter(val context: Context, file: UniFile) : Closeable {
+    private val pfd = file.openFileDescriptor(context, "wt")
+    private val archive = Archive.writeNew()
+    private val entry = ArchiveEntry.new2(archive)
+    private val buffer = ByteBuffer.allocateDirect(8192)
+
+    init {
+        try {
+            Archive.setCharset(archive, Charsets.UTF_8.name().toByteArray())
+            Archive.writeSetFormatZip(archive)
+            Archive.writeZipSetCompressionStore(archive)
+            Archive.writeOpenFd(archive, pfd.fd)
+        } catch (e: ArchiveException) {
+            close()
+            throw e
+        }
+    }
+
+    fun write(file: UniFile) {
+        file.openFileDescriptor(context, "r").use {
+            val fd = it.fileDescriptor
+            ArchiveEntry.clear(entry)
+            ArchiveEntry.setPathnameUtf8(entry, file.name)
+            val stat = Os.fstat(fd)
+            ArchiveEntry.setStat(entry, stat.toArchiveStat())
+            Archive.writeHeader(archive, entry)
+            while (true) {
+                buffer.clear()
+                Os.read(fd, buffer)
+                if (buffer.position() == 0) break
+                buffer.flip()
+                Archive.writeData(archive, buffer)
+            }
+            Archive.writeFinishEntry(archive)
+        }
+    }
+
+    override fun close() {
+        ArchiveEntry.free(entry)
+        Archive.writeFree(archive)
+        pfd.close()
+    }
+}
+
+private fun StructStat.toArchiveStat() = ArchiveEntry.StructStat().apply {
+    stDev = st_dev
+    stMode = st_mode
+    stNlink = st_nlink.toInt()
+    stUid = st_uid
+    stGid = st_gid
+    stRdev = st_rdev
+    stSize = st_size
+    stBlksize = st_blksize
+    stBlocks = st_blocks
+    stAtim = timespec(st_atime)
+    stMtim = timespec(st_mtime)
+    stCtim = timespec(st_ctime)
+    stIno = st_ino
+}
+
+private fun timespec(tvSec: Long) = ArchiveEntry.StructTimespec().also { it.tvSec = tvSec }
@@ -1,8 +0,0 @@
-package mihon.core.common.extensions
-
-import org.apache.commons.compress.archivers.zip.ZipFile
-import java.nio.channels.SeekableByteChannel
-
-fun SeekableByteChannel.toZipFile(): ZipFile {
-    return ZipFile.Builder().setSeekableByteChannel(this).get()
-}
@@ -3,7 +3,6 @@ package tachiyomi.core.common.storage
 import android.content.Context
 import android.os.ParcelFileDescriptor
 import com.hippo.unifile.UniFile
-import java.nio.channels.FileChannel

 val UniFile.extension: String?
     get() = name?.substringAfterLast('.')
@@ -14,6 +13,5 @@ val UniFile.nameWithoutExtension: String?
 val UniFile.displayablePath: String
     get() = filePath ?: uri.toString()

-fun UniFile.openReadOnlyChannel(context: Context): FileChannel {
-    return ParcelFileDescriptor.AutoCloseInputStream(context.contentResolver.openFileDescriptor(uri, "r")).channel
-}
+fun UniFile.openFileDescriptor(context: Context, mode: String): ParcelFileDescriptor =
+    context.contentResolver.openFileDescriptor(uri, mode) ?: error("Failed to open file descriptor: $displayablePath")
@@ -32,8 +32,7 @@ jsoup = "org.jsoup:jsoup:1.17.2"

 disklrucache = "com.jakewharton:disklrucache:2.0.2"
 unifile = "com.github.tachiyomiorg:unifile:e0def6b3dc"
-common-compress = "org.apache.commons:commons-compress:1.26.2"
-junrar = "com.github.junrar:junrar:7.5.5"
+libarchive = "me.zhanghai.android.libarchive:library:1.1.0"

 sqlite-framework = { module = "androidx.sqlite:sqlite-framework", version.ref = "sqlite" }
 sqlite-ktx = { module = "androidx.sqlite:sqlite-ktx", version.ref = "sqlite" }
@@ -104,7 +103,6 @@ detekt-rules-formatting = { module = "io.gitlab.arturbosch.detekt:detekt-formatt
 detekt-rules-compose = { module = "io.nlopez.compose.rules:detekt", version.ref = "detektCompose" }

 [bundles]
-archive = ["common-compress", "junrar"]
 okhttp = ["okhttp-core", "okhttp-logging", "okhttp-brotli", "okhttp-dnsoverhttps"]
 js-engine = ["quickjs-android"]
 sqlite = ["sqlite-framework", "sqlite-ktx", "sqlite-android"]
@@ -781,7 +781,6 @@
     <string name="transition_pages_error">Failed to load pages: %1$s</string>
     <string name="page_list_empty_error">No pages found</string>
     <string name="loader_not_implemented_error">Source not found</string>
-    <string name="loader_rar5_error">RARv5 format is not supported</string>

     <!-- Updates -->
     <string name="updating_library">Updating library</string>
@@ -12,7 +12,6 @@ kotlin {
                 api(projects.i18n)

                 implementation(libs.unifile)
-                implementation(libs.bundles.archive)
             }
         }
         val androidMain by getting {
@@ -17,13 +17,12 @@ import kotlinx.coroutines.awaitAll
 import kotlinx.serialization.json.Json
 import kotlinx.serialization.json.decodeFromStream
 import logcat.LogPriority
-import mihon.core.common.extensions.toZipFile
+import mihon.core.common.archive.archiveReader
 import nl.adaptivity.xmlutil.AndroidXmlReader
 import nl.adaptivity.xmlutil.serialization.XML
 import tachiyomi.core.common.i18n.stringResource
 import tachiyomi.core.common.storage.extension
 import tachiyomi.core.common.storage.nameWithoutExtension
-import tachiyomi.core.common.storage.openReadOnlyChannel
 import tachiyomi.core.common.util.lang.withIOContext
 import tachiyomi.core.common.util.system.ImageUtil
 import tachiyomi.core.common.util.system.logcat
@@ -45,7 +44,6 @@ import uy.kohesive.injekt.injectLazy
 import java.io.InputStream
 import java.nio.charset.StandardCharsets
 import kotlin.time.Duration.Companion.days
-import com.github.junrar.Archive as JunrarArchive
 import tachiyomi.domain.source.model.Source as DomainSource

 actual class LocalSource(
|
@ -187,9 +185,7 @@ actual class LocalSource(
|
||||||
|
|
||||||
// Copy ComicInfo.xml from chapter archive to top level if found
|
// Copy ComicInfo.xml from chapter archive to top level if found
|
||||||
noXmlFile == null -> {
|
noXmlFile == null -> {
|
||||||
val chapterArchives = mangaDirFiles
|
val chapterArchives = mangaDirFiles.filter(Archive::isSupported)
|
||||||
.filter(Archive::isSupported)
|
|
||||||
.toList()
|
|
||||||
|
|
||||||
val copiedFile = copyComicInfoFileFromArchive(chapterArchives, mangaDir)
|
val copiedFile = copyComicInfoFileFromArchive(chapterArchives, mangaDir)
|
||||||
if (copiedFile != null) {
|
if (copiedFile != null) {
|
||||||
|
@ -209,26 +205,10 @@ actual class LocalSource(
|
||||||
|
|
||||||
private fun copyComicInfoFileFromArchive(chapterArchives: List<UniFile>, folder: UniFile): UniFile? {
|
private fun copyComicInfoFileFromArchive(chapterArchives: List<UniFile>, folder: UniFile): UniFile? {
|
||||||
for (chapter in chapterArchives) {
|
for (chapter in chapterArchives) {
|
||||||
when (Format.valueOf(chapter)) {
|
chapter.archiveReader(context).use { reader ->
|
||||||
is Format.Zip -> {
|
reader.getInputStream(COMIC_INFO_FILE)?.use { stream ->
|
||||||
chapter.openReadOnlyChannel(context).toZipFile().use { zip ->
|
return copyComicInfoFile(stream, folder)
|
||||||
zip.getEntry(COMIC_INFO_FILE)?.let { comicInfoFile ->
|
|
||||||
zip.getInputStream(comicInfoFile).buffered().use { stream ->
|
|
||||||
return copyComicInfoFile(stream, folder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
is Format.Rar -> {
|
|
||||||
JunrarArchive(chapter.openInputStream()).use { rar ->
|
|
||||||
rar.fileHeaders.firstOrNull { it.fileName == COMIC_INFO_FILE }?.let { comicInfoFile ->
|
|
||||||
rar.getInputStream(comicInfoFile).buffered().use { stream ->
|
|
||||||
return copyComicInfoFile(stream, folder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else -> {}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return null
|
return null
|
||||||
|
@@ -254,7 +234,7 @@ actual class LocalSource(
     override suspend fun getChapterList(manga: SManga): List<SChapter> = withIOContext {
         val chapters = fileSystem.getFilesInMangaDirectory(manga.url)
             // Only keep supported formats
-            .filter { it.isDirectory || Archive.isSupported(it) }
+            .filter { it.isDirectory || Archive.isSupported(it) || it.extension.equals("epub", true) }
             .map { chapterFile ->
                 SChapter.create().apply {
                     url = "${manga.url}/${chapterFile.name}"
@@ -270,7 +250,7 @@ actual class LocalSource(

                     val format = Format.valueOf(chapterFile)
                     if (format is Format.Epub) {
-                        EpubFile(format.file.openReadOnlyChannel(context)).use { epub ->
+                        EpubFile(format.file.archiveReader(context)).use { epub ->
                             epub.fillMetadata(manga, this)
                         }
                     }
@@ -328,31 +308,22 @@ actual class LocalSource(

                 entry?.let { coverManager.update(manga, it.openInputStream()) }
             }
-            is Format.Zip -> {
-                format.file.openReadOnlyChannel(context).toZipFile().use { zip ->
-                    val entry = zip.entries.toList()
-                        .sortedWith { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }
-                        .find { !it.isDirectory && ImageUtil.isImage(it.name) { zip.getInputStream(it) } }
+            is Format.Archive -> {
+                format.file.archiveReader(context).use { reader ->
+                    val entry = reader.useEntries { entries ->
+                        entries
+                            .sortedWith { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }
+                            .find { it.isFile && ImageUtil.isImage(it.name) { reader.getInputStream(it.name)!! } }
+                    }

-                    entry?.let { coverManager.update(manga, zip.getInputStream(it)) }
-                }
-            }
-            is Format.Rar -> {
-                JunrarArchive(format.file.openInputStream()).use { archive ->
-                    val entry = archive.fileHeaders
-                        .sortedWith { f1, f2 -> f1.fileName.compareToCaseInsensitiveNaturalOrder(f2.fileName) }
-                        .find { !it.isDirectory && ImageUtil.isImage(it.fileName) { archive.getInputStream(it) } }
-
-                    entry?.let { coverManager.update(manga, archive.getInputStream(it)) }
+                    entry?.let { coverManager.update(manga, reader.getInputStream(it.name)!!) }
                 }
             }
             is Format.Epub -> {
-                EpubFile(format.file.openReadOnlyChannel(context)).use { epub ->
-                    val entry = epub.getImagesFromPages()
-                        .firstOrNull()
-                        ?.let { epub.getEntry(it) }
+                EpubFile(format.file.archiveReader(context)).use { epub ->
+                    val entry = epub.getImagesFromPages().firstOrNull()

-                    entry?.let { coverManager.update(manga, epub.getInputStream(it)) }
+                    entry?.let { coverManager.update(manga, epub.getInputStream(it)!!) }
                 }
             }
         }
@@ -5,9 +5,9 @@ import tachiyomi.core.common.storage.extension

 object Archive {

-    private val SUPPORTED_ARCHIVE_TYPES = listOf("zip", "cbz", "rar", "cbr", "epub")
+    private val SUPPORTED_ARCHIVE_TYPES = listOf("zip", "cbz", "rar", "cbr", "7z", "cb7", "tar", "cbt")

     fun isSupported(file: UniFile): Boolean {
-        return file.extension in SUPPORTED_ARCHIVE_TYPES
+        return file.extension?.lowercase() in SUPPORTED_ARCHIVE_TYPES
     }
 }
@@ -2,25 +2,22 @@ package tachiyomi.source.local.io

 import com.hippo.unifile.UniFile
 import tachiyomi.core.common.storage.extension
+import tachiyomi.source.local.io.Archive.isSupported as isArchiveSupported

 sealed interface Format {
     data class Directory(val file: UniFile) : Format
-    data class Zip(val file: UniFile) : Format
-    data class Rar(val file: UniFile) : Format
+    data class Archive(val file: UniFile) : Format
     data class Epub(val file: UniFile) : Format

     class UnknownFormatException : Exception()

     companion object {

-        fun valueOf(file: UniFile) = with(file) {
-            when {
-                isDirectory -> Directory(this)
-                extension.equals("zip", true) || extension.equals("cbz", true) -> Zip(this)
-                extension.equals("rar", true) || extension.equals("cbr", true) -> Rar(this)
-                extension.equals("epub", true) -> Epub(this)
-                else -> throw UnknownFormatException()
-            }
+        fun valueOf(file: UniFile) = when {
+            file.isDirectory -> Directory(file)
+            file.extension.equals("epub", true) -> Epub(file)
+            isArchiveSupported(file) -> Archive(file)
+            else -> throw UnknownFormatException()
         }
     }
 }