diff --git a/.idea/misc.xml b/.idea/misc.xml index 0ad17cb..8978d23 100644 --- a/.idea/misc.xml +++ b/.idea/misc.xml @@ -1,4 +1,3 @@ - diff --git a/README.md b/README.md index 37e2ef9..f5bd3c9 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,6 @@ # AssimilateTranslate -翻译 \ No newline at end of file +在线 +语言识别 +语音翻译 +拍照翻译 diff --git a/app/build.gradle.kts b/app/build.gradle.kts index fac17a6..91db46c 100644 --- a/app/build.gradle.kts +++ b/app/build.gradle.kts @@ -9,7 +9,7 @@ android { defaultConfig { applicationId = "com.assimilate.alltrans" - minSdk = 24 + minSdk = 23 targetSdk = 34 versionCode = 1 versionName = "1.0" @@ -17,7 +17,15 @@ android { testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" } + buildFeatures { + buildConfig = true + viewBinding = true + } + buildTypes { + + debug { } + release { isMinifyEnabled = false proguardFiles( @@ -37,6 +45,44 @@ android { dependencies { + // To recognize Latin script + implementation("com.google.mlkit:text-recognition:16.0.0") + // To recognize Chinese script + implementation("com.google.mlkit:text-recognition-chinese:16.0.0") + // To recognize Devanagari script + implementation("com.google.mlkit:text-recognition-devanagari:16.0.0") + // To recognize Japanese script + implementation("com.google.mlkit:text-recognition-japanese:16.0.0") + // To recognize Korean script + implementation("com.google.mlkit:text-recognition-korean:16.0.0") + + // CameraX + implementation(libs.androidx.camera.camera2) + implementation(libs.androidx.camera.lifecycle) + implementation(libs.androidx.camera.view) + + // 文本识别 + // To recognize Latin script +// implementation(libs.play.services.mlkit.text.recognition) +// // To recognize Chinese script +// implementation(libs.play.services.mlkit.text.recognition.chinese) +// // To recognize Devanagari script +// implementation(libs.play.services.mlkit.text.recognition.devanagari) +// // To recognize Japanese script +// 
implementation(libs.play.services.mlkit.text.recognition.japanese) +// // To recognize Korean script +// implementation(libs.play.services.mlkit.text.recognition.korean) + + + // other + implementation(libs.guava) + implementation(libs.retrofit) + implementation(libs.converter.gson) + implementation(libs.gson) + implementation(libs.lottie) + implementation(libs.glide) + + implementation(libs.androidx.core.ktx) implementation(libs.androidx.appcompat) implementation(libs.material) diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml index 1eba554..f77dee7 100644 --- a/app/src/main/AndroidManifest.xml +++ b/app/src/main/AndroidManifest.xml @@ -2,6 +2,17 @@ + + + + + + + + + + + + + + + + @@ -21,6 +51,7 @@ + \ No newline at end of file diff --git a/app/src/main/java/com/assimilate/alltrans/adapters/ContainerAdapter.java b/app/src/main/java/com/assimilate/alltrans/adapters/ContainerAdapter.java new file mode 100644 index 0000000..ec05c05 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/adapters/ContainerAdapter.java @@ -0,0 +1,33 @@ +package com.assimilate.alltrans.adapters; + +import androidx.annotation.NonNull; +import androidx.fragment.app.Fragment; +import androidx.fragment.app.FragmentActivity; +import androidx.viewpager2.adapter.FragmentStateAdapter; + +import java.util.ArrayList; + +public class ContainerAdapter extends FragmentStateAdapter { + + private final ArrayList fragments; + + public ContainerAdapter(@NonNull FragmentActivity fragmentActivity, @NonNull ArrayList list) { + super(fragmentActivity); + + this.fragments = new ArrayList<>(); + if (!list.isEmpty()) { + this.fragments.addAll(list); + } + } + + @NonNull + @Override + public Fragment createFragment(int position) { + return fragments.get(position); + } + + @Override + public int getItemCount() { + return fragments.size(); + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/adapters/LanguageAdapter.java 
b/app/src/main/java/com/assimilate/alltrans/adapters/LanguageAdapter.java new file mode 100644 index 0000000..9931619 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/adapters/LanguageAdapter.java @@ -0,0 +1,105 @@ +package com.assimilate.alltrans.adapters; + +import android.app.Activity; +import android.text.TextUtils; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.ImageView; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.RecyclerView; + +import com.assimilate.alltrans.R; +import com.assimilate.alltrans.common.Language; +import com.assimilate.alltrans.databinding.LanguageItemLayoutBinding; +import com.bumptech.glide.Glide; +import com.bumptech.glide.request.RequestOptions; + +import java.util.ArrayList; + +public class LanguageAdapter extends RecyclerView.Adapter { + private final Activity mActivity; + private final ArrayList languages; + private final OnClickListener listener; + private final RequestOptions options; + + private boolean sorting = false; + + public LanguageAdapter(@NonNull final Activity activity, @NonNull final ArrayList languageList, final OnClickListener onClickListener) { + this.mActivity = activity; + this.languages = new ArrayList(); + this.listener = onClickListener; + + if (!languageList.isEmpty()) { + languages.addAll(languageList); + } + + options = new RequestOptions() + .placeholder(R.mipmap.ic_launcher) + .error(R.mipmap.ic_launcher) + .fitCenter(); + } + + @NonNull + @Override + public LanguageHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + return new LanguageHolder(LanguageItemLayoutBinding.inflate(LayoutInflater.from(parent.getContext()), parent, false)); + } + + @Override + public void onBindViewHolder(@NonNull LanguageHolder holder, int position) { + final Language currentLanguage = languages.get(position); + + if (null != currentLanguage) { + drawImage(currentLanguage.mFlagUrl, 
holder.mBinding.languageFlag); + if (!TextUtils.isEmpty(currentLanguage.getLanguage())) { + holder.mBinding.language.setText(currentLanguage.getLanguage()); + } else { + holder.mBinding.language.setText(mActivity.getString(R.string.app_name)); + } + } + + holder.mBinding.getRoot().setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + final int position = holder.getAdapterPosition(); + if (null != listener) + listener.click(position, currentLanguage); + } + }); + } + + @Override + public int getItemCount() { + return languages.size(); + } + + private void drawImage(@NonNull final String url, @NonNull final ImageView view) { + if (mActivity.isFinishing() || mActivity.isDestroyed()) return; + if (TextUtils.isEmpty(url)) { + Glide.with(mActivity) + .applyDefaultRequestOptions(options) + .load(R.mipmap.ic_launcher) + .into(view); + } else { + Glide.with(mActivity) + .applyDefaultRequestOptions(options) + .load(url) + .into(view); + } + } + + public static class LanguageHolder extends RecyclerView.ViewHolder { + public LanguageItemLayoutBinding mBinding; + + public LanguageHolder(@NonNull LanguageItemLayoutBinding binding) { + super(binding.getRoot()); + this.mBinding = binding; + } + } + + public interface OnClickListener { + void click(int position, Language language); + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/adapters/TranslationAdapter.java b/app/src/main/java/com/assimilate/alltrans/adapters/TranslationAdapter.java new file mode 100644 index 0000000..e527387 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/adapters/TranslationAdapter.java @@ -0,0 +1,125 @@ +package com.assimilate.alltrans.adapters; + +import android.text.TextUtils; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; +import android.widget.CompoundButton; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.RecyclerView; + +import 
com.assimilate.alltrans.databinding.LayoutItemTranslationBinding; +import com.assimilate.alltrans.mydb.Translations; + +import java.util.ArrayList; + +public class TranslationAdapter extends RecyclerView.Adapter { + private final ArrayList items; + private final TranslationItemCallback callback; + + public TranslationAdapter(@NonNull final ArrayList list, @NonNull final TranslationItemCallback callback) { + this.callback = callback; + this.items = new ArrayList<>(); + + if (!list.isEmpty()) { + for (Translations translations : list) { + items.add(new TranslationItem( + translations.getId(), translations.getSourceLanguage(), translations.getSourceTxt(), + translations.getTargetLanguage(), translations.getTargetTxt(), translations.getCurrentTimeMillis(), + translations.getExist(), translations.getCollection() + )); + } + } + } + + @NonNull + @Override + public TranslationHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + return new TranslationHolder(LayoutItemTranslationBinding.inflate(LayoutInflater.from(parent.getContext()), parent, false)); + } + + @Override + public void onBindViewHolder(@NonNull TranslationHolder holder, int position) { + final TranslationItem item = items.get(position); + + holder.mBinding.speakTarget.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + final String speech = item.getTargetTxt(); + if (!TextUtils.isEmpty(speech)) { + callback.speech(speech); + } + } + }); + holder.mBinding.checkboxCurrent.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { + @Override + public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { + item.currentChecked = isChecked; + + if (isChecked) { + callback.updateList(Operation.ADD, item.getId(), holder.getAdapterPosition()); + } else { + callback.updateList(Operation.REMOVE, item.getId(), holder.getAdapterPosition()); + } + } + }); + + final String itemSourceTxt = item.getSourceTxt(); + if 
(!TextUtils.isEmpty(itemSourceTxt)) { + holder.mBinding.tvSource.setText(itemSourceTxt); + } + + final String targetTxt = item.getTargetTxt(); + if (!TextUtils.isEmpty(targetTxt)) { + holder.mBinding.tvTarget.setText(targetTxt); + } + + holder.mBinding.checkboxCurrent.setChecked(item.currentChecked); + } + + @Override + public int getItemCount() { + return items.size(); + } + + public void updateSet(ArrayList indexes) { + if (null != indexes && !indexes.isEmpty()) { + for (Integer index : indexes) { + items.remove(index.intValue()); + notifyItemRemoved(index); + } + } + } + + + static class TranslationHolder extends RecyclerView.ViewHolder { + private LayoutItemTranslationBinding mBinding; + + public TranslationHolder(@NonNull LayoutItemTranslationBinding binding) { + super(binding.getRoot()); + mBinding = binding; + } + } + + static class TranslationItem extends Translations { + public boolean currentChecked = false; + + public TranslationItem(long id, String sourceLanguage, String sourceTxt, String targetLanguage, String targetTxt, long currentTimeMillis, int exist, int collection) { + super(id, sourceLanguage, sourceTxt, targetLanguage, targetTxt, currentTimeMillis, exist, collection); + } + + public TranslationItem(String sourceLanguage, String sourceTxt, String targetLanguage, String targetTxt) { + super(sourceLanguage, sourceTxt, targetLanguage, targetTxt); + } + } + + public interface TranslationItemCallback { + void updateList(Operation operation, long id, int position); + void speech(String value); + } + + public enum Operation{ + REMOVE, ADD + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/BitmapUtils.java b/app/src/main/java/com/assimilate/alltrans/common/BitmapUtils.java new file mode 100755 index 0000000..42f32a4 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/BitmapUtils.java @@ -0,0 +1,284 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.assimilate.alltrans.common; + +import android.content.ContentResolver; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.ImageFormat; +import android.graphics.Matrix; +import android.graphics.Rect; +import android.graphics.YuvImage; +import android.media.Image; +import android.media.Image.Plane; +import android.net.Uri; +import android.os.Build.VERSION_CODES; +import android.provider.MediaStore; +import android.util.Log; + +import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.camera.core.ExperimentalGetImage; +import androidx.camera.core.ImageProxy; +import androidx.exifinterface.media.ExifInterface; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; + +/** Utils functions for bitmap conversions. */ +public class BitmapUtils { + private static final String TAG = "BitmapUtils"; + + /** Converts NV21 format byte buffer to bitmap. 
*/ + @Nullable + public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) { + data.rewind(); + byte[] imageInBuffer = new byte[data.limit()]; + data.get(imageInBuffer, 0, imageInBuffer.length); + try { + YuvImage image = + new YuvImage( + imageInBuffer, ImageFormat.NV21, metadata.getWidth(), metadata.getHeight(), null); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + image.compressToJpeg(new Rect(0, 0, metadata.getWidth(), metadata.getHeight()), 80, stream); + + Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size()); + + stream.close(); + return rotateBitmap(bmp, metadata.getRotation(), false, false); + } catch (Exception e) { + Log.e("VisionProcessorBase", "Error: " + e.getMessage()); + } + return null; + } + + /** Converts a YUV_420_888 image from CameraX API to a bitmap. */ + @RequiresApi(VERSION_CODES.LOLLIPOP) + @Nullable + @ExperimentalGetImage + public static Bitmap getBitmap(ImageProxy image) { + FrameMetadata frameMetadata = + new FrameMetadata.Builder() + .setWidth(image.getWidth()) + .setHeight(image.getHeight()) + .setRotation(image.getImageInfo().getRotationDegrees()) + .build(); + + ByteBuffer nv21Buffer = + yuv420ThreePlanesToNV21(image.getImage().getPlanes(), image.getWidth(), image.getHeight()); + return getBitmap(nv21Buffer, frameMetadata); + } + + /** Rotates a bitmap if it is converted from a bytebuffer. */ + private static Bitmap rotateBitmap( + Bitmap bitmap, int rotationDegrees, boolean flipX, boolean flipY) { + Matrix matrix = new Matrix(); + + // Rotate the image back to straight. + matrix.postRotate(rotationDegrees); + + // Mirror the image along the X or Y axis. + matrix.postScale(flipX ? -1.0f : 1.0f, flipY ? -1.0f : 1.0f); + Bitmap rotatedBitmap = + Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true); + + // Recycle the old bitmap if it has changed. 
+ if (rotatedBitmap != bitmap) { + bitmap.recycle(); + } + return rotatedBitmap; + } + + @Nullable + public static Bitmap getBitmapFromContentUri(ContentResolver contentResolver, Uri imageUri) + throws IOException { + Bitmap decodedBitmap = MediaStore.Images.Media.getBitmap(contentResolver, imageUri); + if (decodedBitmap == null) { + return null; + } + int orientation = getExifOrientationTag(contentResolver, imageUri); + + int rotationDegrees = 0; + boolean flipX = false; + boolean flipY = false; + // See e.g. https://magnushoff.com/articles/jpeg-orientation/ for a detailed explanation on each + // orientation. + switch (orientation) { + case ExifInterface.ORIENTATION_FLIP_HORIZONTAL: + flipX = true; + break; + case ExifInterface.ORIENTATION_ROTATE_90: + rotationDegrees = 90; + break; + case ExifInterface.ORIENTATION_TRANSPOSE: + rotationDegrees = 90; + flipX = true; + break; + case ExifInterface.ORIENTATION_ROTATE_180: + rotationDegrees = 180; + break; + case ExifInterface.ORIENTATION_FLIP_VERTICAL: + flipY = true; + break; + case ExifInterface.ORIENTATION_ROTATE_270: + rotationDegrees = -90; + break; + case ExifInterface.ORIENTATION_TRANSVERSE: + rotationDegrees = -90; + flipX = true; + break; + case ExifInterface.ORIENTATION_UNDEFINED: + case ExifInterface.ORIENTATION_NORMAL: + default: + // No transformations necessary in this case. + } + + return rotateBitmap(decodedBitmap, rotationDegrees, flipX, flipY); + } + + private static int getExifOrientationTag(ContentResolver resolver, Uri imageUri) { + // We only support parsing EXIF orientation tag from local file on the device. 
+ // See also: + // https://android-developers.googleblog.com/2016/12/introducing-the-exifinterface-support-library.html + if (!ContentResolver.SCHEME_CONTENT.equals(imageUri.getScheme()) + && !ContentResolver.SCHEME_FILE.equals(imageUri.getScheme())) { + return 0; + } + + ExifInterface exif; + try (InputStream inputStream = resolver.openInputStream(imageUri)) { + if (inputStream == null) { + return 0; + } + + exif = new ExifInterface(inputStream); + } catch (IOException e) { + Log.e(TAG, "failed to open file to read rotation meta data: " + imageUri, e); + return 0; + } + + return exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); + } + + /** + * Converts YUV_420_888 to NV21 bytebuffer. + * + *

The NV21 format consists of a single byte array containing the Y, U and V values. For an + * image of size S, the first S positions of the array contain all the Y values. The remaining + * positions contain interleaved V and U values. U and V are subsampled by a factor of 2 in both + * dimensions, so there are S/4 U values and S/4 V values. In summary, the NV21 array will contain + * S Y values followed by S/4 VU values: YYYYYYYYYYYYYY(...)YVUVUVUVU(...)VU + * + *

YUV_420_888 is a generic format that can describe any YUV image where U and V are subsampled + * by a factor of 2 in both dimensions. {@link Image#getPlanes} returns an array with the Y, U and + * V planes. The Y plane is guaranteed not to be interleaved, so we can just copy its values into + * the first part of the NV21 array. The U and V planes may already have the representation in the + * NV21 format. This happens if the planes share the same buffer, the V buffer is one position + * before the U buffer and the planes have a pixelStride of 2. If this is case, we can just copy + * them to the NV21 array. + */ + private static ByteBuffer yuv420ThreePlanesToNV21( + Plane[] yuv420888planes, int width, int height) { + int imageSize = width * height; + byte[] out = new byte[imageSize + 2 * (imageSize / 4)]; + + if (areUVPlanesNV21(yuv420888planes, width, height)) { + // Copy the Y values. + yuv420888planes[0].getBuffer().get(out, 0, imageSize); + + ByteBuffer uBuffer = yuv420888planes[1].getBuffer(); + ByteBuffer vBuffer = yuv420888planes[2].getBuffer(); + // Get the first V value from the V buffer, since the U buffer does not contain it. + vBuffer.get(out, imageSize, 1); + // Copy the first U value and the remaining VU values from the U buffer. + uBuffer.get(out, imageSize + 1, 2 * imageSize / 4 - 1); + } else { + // Fallback to copying the UV values one by one, which is slower but also works. + // Unpack Y. + unpackPlane(yuv420888planes[0], width, height, out, 0, 1); + // Unpack U. + unpackPlane(yuv420888planes[1], width, height, out, imageSize + 1, 2); + // Unpack V. + unpackPlane(yuv420888planes[2], width, height, out, imageSize, 2); + } + + return ByteBuffer.wrap(out); + } + + /** Checks if the UV plane buffers of a YUV_420_888 image are in the NV21 format. 
*/ + private static boolean areUVPlanesNV21(Plane[] planes, int width, int height) { + int imageSize = width * height; + + ByteBuffer uBuffer = planes[1].getBuffer(); + ByteBuffer vBuffer = planes[2].getBuffer(); + + // Backup buffer properties. + int vBufferPosition = vBuffer.position(); + int uBufferLimit = uBuffer.limit(); + + // Advance the V buffer by 1 byte, since the U buffer will not contain the first V value. + vBuffer.position(vBufferPosition + 1); + // Chop off the last byte of the U buffer, since the V buffer will not contain the last U value. + uBuffer.limit(uBufferLimit - 1); + + // Check that the buffers are equal and have the expected number of elements. + boolean areNV21 = + (vBuffer.remaining() == (2 * imageSize / 4 - 2)) && (vBuffer.compareTo(uBuffer) == 0); + + // Restore buffers to their initial state. + vBuffer.position(vBufferPosition); + uBuffer.limit(uBufferLimit); + + return areNV21; + } + + /** + * Unpack an image plane into a byte array. + * + *

The input plane data will be copied in 'out', starting at 'offset' and every pixel will be + * spaced by 'pixelStride'. Note that there is no row padding on the output. + */ + private static void unpackPlane( + Plane plane, int width, int height, byte[] out, int offset, int pixelStride) { + ByteBuffer buffer = plane.getBuffer(); + buffer.rewind(); + + // Compute the size of the current plane. + // We assume that it has the aspect ratio as the original image. + int numRow = (buffer.limit() + plane.getRowStride() - 1) / plane.getRowStride(); + if (numRow == 0) { + return; + } + int scaleFactor = height / numRow; + int numCol = width / scaleFactor; + + // Extract the data in the output buffer. + int outputPos = offset; + int rowStart = 0; + for (int row = 0; row < numRow; row++) { + int inputPos = rowStart; + for (int col = 0; col < numCol; col++) { + out[outputPos] = buffer.get(inputPos); + outputPos += pixelStride; + inputPos += plane.getPixelStride(); + } + rowStart += plane.getRowStride(); + } + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/CameraImageGraphic.java b/app/src/main/java/com/assimilate/alltrans/common/CameraImageGraphic.java new file mode 100755 index 0000000..c783a2e --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/CameraImageGraphic.java @@ -0,0 +1,38 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.assimilate.alltrans.common; + +import android.graphics.Bitmap; +import android.graphics.Canvas; + +import com.assimilate.alltrans.curview.GraphicOverlay; + +/** Draw camera image to background. */ +public class CameraImageGraphic extends GraphicOverlay.Graphic { + + private final Bitmap bitmap; + + public CameraImageGraphic(GraphicOverlay overlay, Bitmap bitmap) { + super(overlay); + this.bitmap = bitmap; + } + + @Override + public void draw(Canvas canvas) { + canvas.drawBitmap(bitmap, getTransformationMatrix(), null); + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/FrameMetadata.java b/app/src/main/java/com/assimilate/alltrans/common/FrameMetadata.java new file mode 100755 index 0000000..e9d2242 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/FrameMetadata.java @@ -0,0 +1,70 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.assimilate.alltrans.common; + +/** Describing a frame info. 
*/ +public class FrameMetadata { + + private final int width; + private final int height; + private final int rotation; + + public int getWidth() { + return width; + } + + public int getHeight() { + return height; + } + + public int getRotation() { + return rotation; + } + + private FrameMetadata(int width, int height, int rotation) { + this.width = width; + this.height = height; + this.rotation = rotation; + } + + /** Builder of {@link FrameMetadata}. */ + public static class Builder { + + private int width; + private int height; + private int rotation; + + public Builder setWidth(int width) { + this.width = width; + return this; + } + + public Builder setHeight(int height) { + this.height = height; + return this; + } + + public Builder setRotation(int rotation) { + this.rotation = rotation; + return this; + } + + public FrameMetadata build() { + return new FrameMetadata(width, height, rotation); + } + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/InferenceInfoGraphic.java b/app/src/main/java/com/assimilate/alltrans/common/InferenceInfoGraphic.java new file mode 100755 index 0000000..2d070ca --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/InferenceInfoGraphic.java @@ -0,0 +1,92 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.assimilate.alltrans.common; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; + +import androidx.annotation.Nullable; + +import com.assimilate.alltrans.curview.GraphicOverlay; + +/** Graphic instance for rendering inference info (latency, FPS, resolution) in an overlay view. */ +public class InferenceInfoGraphic extends GraphicOverlay.Graphic { + + private static final int TEXT_COLOR = Color.WHITE; + private static final float TEXT_SIZE = 60.0f; + + private final Paint textPaint; + private final GraphicOverlay overlay; + private final long frameLatency; + private final long detectorLatency; + + // Only valid when a stream of input images is being processed. Null for single image mode. + @Nullable private final Integer framesPerSecond; + private boolean showLatencyInfo = true; + + public InferenceInfoGraphic( + GraphicOverlay overlay, + long frameLatency, + long detectorLatency, + @Nullable Integer framesPerSecond) { + super(overlay); + this.overlay = overlay; + this.frameLatency = frameLatency; + this.detectorLatency = detectorLatency; + this.framesPerSecond = framesPerSecond; + textPaint = new Paint(); + textPaint.setColor(TEXT_COLOR); + textPaint.setTextSize(TEXT_SIZE); + textPaint.setShadowLayer(5.0f, 0f, 0f, Color.BLACK); + postInvalidate(); + } + + /** Creates an {@link InferenceInfoGraphic} to only display image size. 
*/ + public InferenceInfoGraphic(GraphicOverlay overlay) { + this(overlay, 0, 0, null); + showLatencyInfo = false; + } + + @Override + public synchronized void draw(Canvas canvas) { + float x = TEXT_SIZE * 0.5f; + float y = TEXT_SIZE * 1.5f; + + canvas.drawText( + "InputImage size: " + overlay.getImageHeight() + "x" + overlay.getImageWidth(), + x, + y, + textPaint); + + if (!showLatencyInfo) { + return; + } + // Draw FPS (if valid) and inference latency + if (framesPerSecond != null) { + canvas.drawText( + "FPS: " + framesPerSecond + ", Frame latency: " + frameLatency + " ms", + x, + y + TEXT_SIZE, + textPaint); + } else { + canvas.drawText("Frame latency: " + frameLatency + " ms", x, y + TEXT_SIZE, textPaint); + } + canvas.drawText( + "Detector latency: " + detectorLatency + " ms", x, y + TEXT_SIZE * 2, textPaint); + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/Language.java b/app/src/main/java/com/assimilate/alltrans/common/Language.java new file mode 100644 index 0000000..3d55116 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/Language.java @@ -0,0 +1,79 @@ +package com.assimilate.alltrans.common; + +import android.os.Parcel; +import android.os.Parcelable; + +import androidx.annotation.NonNull; + +import com.google.gson.annotations.SerializedName; + +public class Language implements Parcelable { + @SerializedName("language") + private String mLanguage; + @SerializedName("languageCode") + private String mLanguageCode; + @SerializedName("speechCode") + private String mSpeechCode; + public String mFlagUrl; + + public Language() { + } + + public Language(String language, String languageCode, String speechCode) { + this.mLanguage = language; + this.mLanguageCode = languageCode; + this.mSpeechCode = speechCode; + this.mFlagUrl = null; + } + + public Language(String language, String languageCode, String speechCode, String flagUrl) { + this.mLanguage = language; + this.mLanguageCode = languageCode; + this.mSpeechCode = 
package com.assimilate.alltrans.common;

import android.content.Context;
import android.net.Uri;
import android.text.TextUtils;

import androidx.annotation.NonNull;

import com.assimilate.alltrans.R;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Process-wide singleton that lazily loads the supported-language catalog from
 * {@code res/raw/languages} (a JSON array) and decorates each entry with the URI of its
 * flag drawable (mipmap resource named {@code z_<languageCode>}).
 */
public class LanguagesConstants {
    // Guards against a second construction (e.g. via reflection); set inside the class lock.
    private static boolean instanced = false;
    // volatile is required for the double-checked locking in getInstance().
    private volatile static LanguagesConstants languagesConstant;

    // Cached catalog; populated once on first getList() call.
    private final ArrayList<Language> languages;

    private LanguagesConstants() {
        synchronized (LanguagesConstants.class) {
            if (instanced) {
                throw new RuntimeException("Instance multiple LanguagesConstants.");
            } else {
                languages = new ArrayList<>();
                instanced = true;
            }

            if (languagesConstant != null) {
                throw new RuntimeException("looper error(LanguagesConstants instanced).");
            }
        }
    }

    /** Returns the singleton instance (double-checked locking on a volatile field). */
    public static LanguagesConstants getInstance() {
        if (null == languagesConstant) {
            synchronized (LanguagesConstants.class) {
                if (null == languagesConstant) {
                    languagesConstant = new LanguagesConstants();
                }
            }
        }
        return languagesConstant;
    }

    /**
     * Returns the language catalog, loading it from resources on first access.
     *
     * @param context any context able to resolve resources
     */
    public ArrayList<Language> getList(@NonNull final Context context) {
        if (languages.isEmpty()) {
            getLanguages(context);
        }
        return languages;
    }

    /**
     * Reads the raw JSON resource, deserializes it into {@link Language} entries, and fills in
     * each entry's flag URI. Any I/O or parse failure is rethrown as a RuntimeException.
     */
    private void getLanguages(final Context context) {
        // try-with-resources replaces the previous hand-rolled finally-close chain and
        // guarantees the streams are closed in reverse order even on failure.
        // UTF-8 is specified explicitly; the old code used the platform default charset.
        try (InputStream inputStream = context.getResources().openRawResource(R.raw.languages);
             InputStreamReader inputStreamReader =
                     new InputStreamReader(inputStream, StandardCharsets.UTF_8);
             BufferedReader bufferedReader = new BufferedReader(inputStreamReader)) {

            // Read the whole JSON document into memory.
            final StringBuilder builder = new StringBuilder();
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                builder.append(line);
            }

            // Convert the JSON text into Language objects.
            final String result = builder.toString();
            if (!TextUtils.isEmpty(result)) {
                Gson gson = new Gson();
                Type listType = new TypeToken<List<Language>>() {}.getType();

                List<Language> temp = gson.fromJson(result, listType);
                if (null != temp && !temp.isEmpty()) {
                    languages.clear();
                    for (Language iLanguage : temp) {
                        iLanguage.mFlagUrl = getLanguageFlag(context, iLanguage.getLanguageCode());
                    }
                    languages.addAll(temp);
                }
            }

            // Bug fix: previously logged from a finally block, so "successfully" was printed
            // even when an exception had been thrown. Log only on the success path.
            Logger.d("log", "Load languages from configuration successfully.");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Resolves the android.resource:// URI of the flag mipmap for the given language code,
     * or null when no matching {@code z_<code>} resource exists.
     */
    private String getLanguageFlag(final Context context, final String languageCode) {
        final String folderName = Objects.requireNonNull(R.class.getPackage()).getName();
        if (TextUtils.isEmpty(languageCode) || TextUtils.isEmpty(folderName)) {
            return null;
        } else {
            int drawableResourceId = context.getResources().getIdentifier(
                    "z_" + languageCode, "mipmap", context.getPackageName());
            if (0 != drawableResourceId) {
                return Uri.parse("android.resource://" + folderName + "/" + drawableResourceId).toString();
            } else {
                return null;
            }
        }
    }
}
Logger { + private final static boolean ENABLE_LOGGER = !"release".equalsIgnoreCase(BuildConfig.BUILD_TYPE); + + public final static String COMMAND_INTERSTITIAL_AD = "google_interstitial_ad"; + public final static String COMMAND_NATIVE_AD = "google_native_ad"; + + public static void i(final String tag, final String message) { + if (ENABLE_LOGGER) { + Log.i(getTag(tag), getMessage(message)); + } + } + + public static void d(final String tag, final String message) { + if (ENABLE_LOGGER) { + Log.d(getTag(tag), getMessage(message)); + } + } + + public static void w(final String tag, final String message) { + if (ENABLE_LOGGER) { + Log.w(getTag(tag), getMessage(message)); + } + } + + public static void e(final String tag, final String message) { + if (ENABLE_LOGGER) { + Log.e(getTag(tag), getMessage(message)); + } + } + + private static String getTag(String tag) { + if (TextUtils.isEmpty(tag)) { + return "TAG"; + } + if (tag.length() > 23) { + final int index2 = tag.length()-10; + tag = tag.substring(0, 10) + "..." + tag.substring(index2); + } + return tag; + } + + private static String getMessage(String msg) { + if (TextUtils.isEmpty(msg)) { + return "null"; + } + return msg; + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/PreferenceUtils.java b/app/src/main/java/com/assimilate/alltrans/common/PreferenceUtils.java new file mode 100755 index 0000000..5c6246a --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/PreferenceUtils.java @@ -0,0 +1,84 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.assimilate.alltrans.common; + +import android.content.Context; +import android.content.SharedPreferences; +import android.preference.PreferenceManager; + +import androidx.annotation.StringRes; + +import com.assimilate.alltrans.R; + +/** + * Utility class to retrieve shared preferences. + */ +public class PreferenceUtils { + + public static boolean shouldHideDetectionInfo(Context context) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(R.string.pref_key_info_hide); + return sharedPreferences.getBoolean(prefKey, false); + } + + + public static boolean shouldGroupRecognizedTextInBlocks(Context context) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(R.string.pref_key_group_recognized_text_in_blocks); + return sharedPreferences.getBoolean(prefKey, false); + } + + public static boolean showLanguageTag(Context context) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(R.string.pref_key_show_language_tag); + return sharedPreferences.getBoolean(prefKey, false); + } + + public static boolean shouldShowTextConfidence(Context context) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(R.string.pref_key_show_text_confidence); + return sharedPreferences.getBoolean(prefKey, false); + } + + public static 
boolean preferGPUForPoseDetection(Context context) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(R.string.pref_key_pose_detector_prefer_gpu); + return sharedPreferences.getBoolean(prefKey, true); + } + + /** + * Mode type preference is backed by {@link android.preference.ListPreference} which only support + * storing its entry value as string type, so we need to retrieve as string and then convert to + * integer. + */ + private static int getModeTypePreferenceValue( + Context context, @StringRes int prefKeyResId, int defaultValue) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(prefKeyResId); + return Integer.parseInt(sharedPreferences.getString(prefKey, String.valueOf(defaultValue))); + } + + public static boolean isCameraLiveViewportEnabled(Context context) { + SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); + String prefKey = context.getString(R.string.pref_key_camera_live_viewport); + return sharedPreferences.getBoolean(prefKey, false); + } + + + private PreferenceUtils() { + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/common/ScopedExecutor.java b/app/src/main/java/com/assimilate/alltrans/common/ScopedExecutor.java new file mode 100755 index 0000000..6a7793e --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/common/ScopedExecutor.java @@ -0,0 +1,62 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/*
 * Copyright 2020 Google LLC. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.assimilate.alltrans.common;

import androidx.annotation.NonNull;

import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Executor decorator that adds a {@link #shutdown} switch: once shut down, submitted runnables
 * that have not yet begun executing are silently dropped, turning this executor into a no-op.
 */
public class ScopedExecutor implements Executor {

    private final Executor delegate;
    private final AtomicBoolean stopped = new AtomicBoolean();

    public ScopedExecutor(@NonNull Executor executor) {
        this.delegate = executor;
    }

    @Override
    public void execute(@NonNull Runnable command) {
        // Drop immediately if this scope is already closed.
        if (stopped.get()) {
            return;
        }
        delegate.execute(() -> {
            // Re-check at run time: shutdown may have happened while the task was queued.
            if (!stopped.get()) {
                command.run();
            }
        });
    }

    /**
     * After this method is called, no runnables that have been submitted or are subsequently
     * submitted will start to execute. Runnables that have already started will run to completion.
     */
    public void shutdown() {
        stopped.set(true);
    }
}
/**
 * Graphic instance for rendering TextBlock position, size, and ID within an associated graphic
 * overlay view.
 */
class TextGraphic
constructor(
    overlay: GraphicOverlay?,
    private val text: Text,
    private val shouldGroupTextInBlocks: Boolean,
    private val showLanguageTag: Boolean,
    private val showConfidence: Boolean
) : GraphicOverlay.Graphic(overlay) {

    private val rectPaint: Paint = Paint()
    private val textPaint: TextPaint
    private val labelPaint: Paint

    init {
        rectPaint.color = MARKER_COLOR
        rectPaint.style = Paint.Style.STROKE
        rectPaint.strokeWidth = STROKE_WIDTH
        textPaint = TextPaint()
        textPaint.color = TEXT_COLOR
        textPaint.textSize = TEXT_SIZE
        labelPaint = Paint()
        labelPaint.color = MARKER_COLOR
        labelPaint.style = Paint.Style.FILL
        // Redraw the overlay, as this graphic has been added.
        postInvalidate()
    }

    /**
     * Draws the text block annotations for position, size, and raw value on the supplied canvas.
     *
     * Fix: the previous implementation iterated [Text.getTextBlocks] TWICE — the second pass
     * re-drew every bounding box and, in grouped mode, substituted slices of a hard-coded debug
     * string ("aaaaaaaaaaa(`_`)…" split on "(`_`)") for the recognized text. That double-rendered
     * all annotations and leaked experiment code into production drawing; a single clean pass
     * over the recognized text is kept here.
     */
    override fun draw(canvas: Canvas) {
        for (textBlock in text.textBlocks) { // Renders the text at the bottom of the box.
            Log.d(TAG, "TextBlock text is: " + textBlock.text)
            Log.d(TAG, "TextBlock recognizedLanguage is: " + textBlock.recognizedLanguage)
            Log.d(TAG, "TextBlock boundingbox is: " + textBlock.boundingBox)
            Log.d(TAG, "TextBlock cornerpoint is: " + Arrays.toString(textBlock.cornerPoints))

            if (shouldGroupTextInBlocks) {
                // One box + label for the entire block; height scales with its line count.
                drawText(
                    getFormattedText(
                        textBlock.text,
                        textBlock.recognizedLanguage,
                        confidence = null
                    ),
                    RectF(textBlock.boundingBox),
                    TEXT_SIZE * textBlock.lines.size + 2 * STROKE_WIDTH,
                    canvas
                )
            } else {
                // One box + label per recognized line.
                for (line in textBlock.lines) {
                    Log.d(TAG, "Line text is: " + line.text)
                    Log.d(TAG, "Line boundingbox is: " + line.boundingBox)
                    Log.d(TAG, "Line cornerpoint is: " + Arrays.toString(line.cornerPoints))
                    Log.d(TAG, "Line confidence is: " + line.confidence)
                    Log.d(TAG, "Line angle is: " + line.angle)
                    // Draws the bounding box around the TextBlock.
                    val rect = RectF(line.boundingBox)
                    drawText(
                        getFormattedText(line.text, line.recognizedLanguage, line.confidence),
                        rect,
                        TEXT_SIZE + 2 * STROKE_WIDTH,
                        canvas
                    )
                    // Element/symbol details are logged only (not drawn).
                    for (element in line.elements) {
                        Log.d(TAG, "Element text is: " + element.text)
                        Log.d(TAG, "Element boundingbox is: " + element.boundingBox)
                        Log.d(
                            TAG,
                            "Element cornerpoint is: " + Arrays.toString(element.cornerPoints)
                        )
                        Log.d(TAG, "Element language is: " + element.recognizedLanguage)
                        Log.d(TAG, "Element confidence is: " + element.confidence)
                        Log.d(TAG, "Element angle is: " + element.angle)
                        for (symbol in element.symbols) {
                            Log.d(TAG, "Symbol text is: " + symbol.text)
                            Log.d(TAG, "Symbol boundingbox is: " + symbol.boundingBox)
                            Log.d(
                                TAG,
                                "Symbol cornerpoint is: " + Arrays.toString(symbol.cornerPoints)
                            )
                            Log.d(TAG, "Symbol confidence is: " + symbol.confidence)
                            Log.d(TAG, "Symbol angle is: " + symbol.angle)
                        }
                    }
                }
            }
        }
    }

    /**
     * Decorates [text] with the language tag (when enabled) and/or the confidence value
     * (when enabled and available).
     */
    private fun getFormattedText(text: String, languageTag: String, confidence: Float?): String {
        val res =
            if (showLanguageTag) String.format(
                TEXT_WITH_LANGUAGE_TAG_FORMAT,
                languageTag,
                text
            ) else text
        return if (showConfidence && confidence != null) String.format("%s (%.2f)", res, confidence)
        else res
    }

    /** Draws the bounding box, a filled label background above it, and the text itself. */
    private fun drawText(text: String, rect: RectF, textHeight: Float, canvas: Canvas) {
        // If the image is flipped, the left will be translated to right, and the right to left.
        val x0 = translateX(rect.left)
        val x1 = translateX(rect.right)
        rect.left = min(x0, x1)
        rect.right = max(x0, x1)
        rect.top = translateY(rect.top)
        rect.bottom = translateY(rect.bottom)
        canvas.drawRect(rect, rectPaint)
        val textWidth = textPaint.measureText(text)
        canvas.drawRect(
            rect.left - STROKE_WIDTH,
            rect.top - textHeight,
            rect.left + textWidth + 2 * STROKE_WIDTH,
            rect.top,
            labelPaint
        )
        // Renders the text at the bottom of the box.
        canvas.drawText(text, rect.left, rect.top - STROKE_WIDTH, textPaint)
    }

    companion object {
        private const val TAG = "TextGraphic"
        private const val TEXT_WITH_LANGUAGE_TAG_FORMAT = "%s:%s"
        private const val TEXT_COLOR = Color.BLACK
        private const val MARKER_COLOR = Color.GREEN
        private const val TEXT_SIZE = 54.0f
        private const val STROKE_WIDTH = 4.0f
    }
}
/*
 * Copyright 2020 Google LLC. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.assimilate.alltrans.common

import android.content.Context
import android.util.Log
import com.assimilate.alltrans.curview.GraphicOverlay
import com.google.android.gms.tasks.Task
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.text.Text
import com.google.mlkit.vision.text.TextRecognition
import com.google.mlkit.vision.text.TextRecognizer
import com.google.mlkit.vision.text.TextRecognizerOptionsInterface

/** Processor for the text detector demo. */
class TextRecognitionProcessor(
    private val context: Context,
    textRecognizerOptions: TextRecognizerOptionsInterface
) : VisionProcessorBase<Text>(context) {

    // Recognizer built once per processor; closed in stop().
    private val textRecognizer: TextRecognizer = TextRecognition.getClient(textRecognizerOptions)

    // Rendering options are snapshotted from preferences at construction time.
    private val shouldGroupRecognizedTextInBlocks: Boolean =
        PreferenceUtils.shouldGroupRecognizedTextInBlocks(context)
    private val showLanguageTag: Boolean = PreferenceUtils.showLanguageTag(context)
    private val showConfidence: Boolean = PreferenceUtils.shouldShowTextConfidence(context)

    /** Releases the underlying recognizer after the base class stops dispatching frames. */
    override fun stop() {
        super.stop()
        textRecognizer.close()
    }

    /** Delegates recognition of one frame to the ML Kit recognizer. */
    override fun detectInImage(image: InputImage): Task<Text> = textRecognizer.process(image)

    /** Renders the recognition result onto the overlay. */
    override fun onSuccess(text: Text, graphicOverlay: GraphicOverlay) {
        Log.d(TAG, "On-device Text detection successful")
        logExtrasForTesting(text)
        graphicOverlay.add(
            TextGraphic(
                graphicOverlay,
                text,
                shouldGroupRecognizedTextInBlocks,
                showLanguageTag,
                showConfidence
            )
        )
    }

    override fun onFailure(e: Exception) {
        Log.w(TAG, "Text detection failed.$e")
    }

    companion object {
        private const val TAG = "TextRecProcessor"

        /** Verbose dump of the full block/line/element/symbol hierarchy for manual testing. */
        private fun logExtrasForTesting(text: Text?) {
            if (text == null) {
                return
            }
            Log.v(MANUAL_TESTING_LOG, "text context is : " + text.text)
            Log.v(MANUAL_TESTING_LOG, "Detected text has : " + text.textBlocks.size + " blocks")
            text.textBlocks.forEachIndexed { i, block ->
                val lines = block.lines
                Log.v(
                    MANUAL_TESTING_LOG,
                    String.format("Detected text block %d has %d lines", i, lines.size)
                )
                lines.forEachIndexed { j, line ->
                    val elements = line.elements
                    Log.v(
                        MANUAL_TESTING_LOG,
                        String.format("Detected text line %d has %d elements", j, elements.size)
                    )
                    elements.forEachIndexed { k, element ->
                        Log.v(
                            MANUAL_TESTING_LOG,
                            String.format("Detected text element %d says: %s", k, element.text)
                        )
                        Log.v(
                            MANUAL_TESTING_LOG,
                            String.format(
                                "Detected text element %d has a bounding box: %s",
                                k,
                                element.boundingBox!!.flattenToString()
                            )
                        )
                        Log.v(
                            MANUAL_TESTING_LOG,
                            String.format(
                                "Expected corner point size is 4, get %d",
                                element.cornerPoints!!.size
                            )
                        )
                        for (point in element.cornerPoints!!) {
                            Log.v(
                                MANUAL_TESTING_LOG,
                                String.format(
                                    "Corner point for element %d is located at: x - %d, y = %d",
                                    k,
                                    point.x,
                                    point.y
                                )
                            )
                        }
                    }
                }
            }
        }
    }
}
/** An interface to process the images with different vision detectors and custom image models. */
public interface VisionImageProcessor {

    /** Processes a bitmap image (single still image, no rotation applied). */
    void processBitmap(Bitmap bitmap, GraphicOverlay graphicOverlay);

    /** Processes ByteBuffer image data, e.g. used for Camera1 live preview case. */
    void processByteBuffer(
            ByteBuffer data, FrameMetadata frameMetadata, GraphicOverlay graphicOverlay)
            throws MlKitException;

    /**
     * Processes ImageProxy image data, e.g. used for CameraX live preview case.
     * NOTE(review): implementations appear responsible for calling {@code image.close()} when
     * done with the frame — confirm against VisionProcessorBase.
     */
    void processImageProxy(ImageProxy image, GraphicOverlay graphicOverlay) throws MlKitException;

    /** Stops the underlying machine learning model and release resources. */
    void stop();
}
/*
 * Copyright 2020 Google LLC. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.assimilate.alltrans.common

import android.app.ActivityManager
import android.content.Context
import android.graphics.Bitmap
import android.os.Build.VERSION_CODES
import android.os.SystemClock
import android.util.Log
import android.widget.Toast
import androidx.annotation.GuardedBy
import androidx.annotation.RequiresApi
import androidx.camera.core.ExperimentalGetImage
import androidx.camera.core.ImageProxy
import com.assimilate.alltrans.curview.GraphicOverlay
import com.google.android.gms.tasks.OnFailureListener
import com.google.android.gms.tasks.OnSuccessListener
import com.google.android.gms.tasks.Task
import com.google.android.gms.tasks.TaskExecutors
import com.google.android.gms.tasks.Tasks
import com.google.android.odml.image.BitmapMlImageBuilder
import com.google.android.odml.image.ByteBufferMlImageBuilder
import com.google.android.odml.image.MediaMlImageBuilder
import com.google.android.odml.image.MlImage
import com.google.mlkit.common.MlKitException
import com.google.mlkit.vision.common.InputImage

import java.lang.Math.max
import java.lang.Math.min
import java.nio.ByteBuffer
import java.util.Timer
import java.util.TimerTask

/**
 * Abstract base class for ML Kit frame processors. Subclasses need to implement {@link
 * #onSuccess(T, FrameMetadata, GraphicOverlay)} to define what they want to do with the detection
 * results and {@link #detectInImage(VisionImage)} to specify the detector object.
 *
 * @param T The type of the detected feature.
 */
abstract class VisionProcessorBase<T>(context: Context) : VisionImageProcessor {

    companion object {
        const val MANUAL_TESTING_LOG = "LogTagForTest"
        private const val TAG = "VisionProcessorBase"
    }

    private var activityManager: ActivityManager =
        context.getSystemService(Context.ACTIVITY_SERVICE) as ActivityManager
    private val fpsTimer = Timer()
    // Results are dispatched on the main thread; shut down in stop() to drop late callbacks.
    private val executor = ScopedExecutor(TaskExecutors.MAIN_THREAD)

    // Whether this processor is already shut down
    private var isShutdown = false

    // Used to calculate latency, running in the same thread, no sync needed.
    private var numRuns = 0
    private var totalFrameMs = 0L
    private var maxFrameMs = 0L
    private var minFrameMs = Long.MAX_VALUE
    private var totalDetectorMs = 0L
    private var maxDetectorMs = 0L
    private var minDetectorMs = Long.MAX_VALUE

    // Frame count that have been processed so far in an one second interval to calculate FPS.
    private var frameProcessedInOneSecondInterval = 0
    private var framesPerSecond = 0

    // To keep the latest images and its metadata.
    @GuardedBy("this") private var latestImage: ByteBuffer? = null
    @GuardedBy("this") private var latestImageMetaData: FrameMetadata? = null
    // To keep the images and metadata in process.
    @GuardedBy("this") private var processingImage: ByteBuffer? = null
    @GuardedBy("this") private var processingMetaData: FrameMetadata? = null

    init {
        // Samples FPS once per second: snapshot the interval counter, then reset it.
        fpsTimer.schedule(
            object : TimerTask() {
                override fun run() {
                    framesPerSecond = frameProcessedInOneSecondInterval
                    frameProcessedInOneSecondInterval = 0
                }
            },
            0,
            1000
        )
    }

    // -----------------Code for processing single still image----------------------------------------
    override fun processBitmap(bitmap: Bitmap?, graphicOverlay: GraphicOverlay) {
        val frameStartMs = SystemClock.elapsedRealtime()

        if (isMlImageEnabled(graphicOverlay.context)) {
            val mlImage = BitmapMlImageBuilder(bitmap!!).build()
            requestDetectInImage(
                mlImage,
                graphicOverlay,
                /* originalCameraImage= */ null,
                /* shouldShowFps= */ false,
                frameStartMs
            )
            mlImage.close()
            return
        }

        requestDetectInImage(
            InputImage.fromBitmap(bitmap!!, 0),
            graphicOverlay,
            /* originalCameraImage= */ null,
            /* shouldShowFps= */ false,
            frameStartMs
        )
    }

    // -----------------Code for processing live preview frame from Camera1 API-----------------------
    // Keeps only the newest frame: if a frame is already in flight the new one replaces
    // latestImage and is picked up when the in-flight frame completes.
    @Synchronized
    override fun processByteBuffer(
        data: ByteBuffer?,
        frameMetadata: FrameMetadata?,
        graphicOverlay: GraphicOverlay
    ) {
        latestImage = data
        latestImageMetaData = frameMetadata
        if (processingImage == null && processingMetaData == null) {
            processLatestImage(graphicOverlay)
        }
    }

    // Moves the latest frame into the "processing" slot and clears the latest slot; the
    // ordering of these assignments is what prevents double-processing — do not reorder.
    @Synchronized
    private fun processLatestImage(graphicOverlay: GraphicOverlay) {
        processingImage = latestImage
        processingMetaData = latestImageMetaData
        latestImage = null
        latestImageMetaData = null
        if (processingImage != null && processingMetaData != null && !isShutdown) {
            processImage(processingImage!!, processingMetaData!!, graphicOverlay)
        }
    }

    private fun processImage(
        data: ByteBuffer,
        frameMetadata: FrameMetadata,
        graphicOverlay: GraphicOverlay
    ) {
        val frameStartMs = SystemClock.elapsedRealtime()
        // If live viewport is on (that is the underneath surface view takes care of the camera preview
        // drawing), skip the unnecessary bitmap creation that used for the manual preview drawing.
        val bitmap =
            if (PreferenceUtils.isCameraLiveViewportEnabled(graphicOverlay.context)) null
            else BitmapUtils.getBitmap(data, frameMetadata)

        if (isMlImageEnabled(graphicOverlay.context)) {
            val mlImage =
                ByteBufferMlImageBuilder(
                    data,
                    frameMetadata.width,
                    frameMetadata.height,
                    MlImage.IMAGE_FORMAT_NV21
                )
                    .setRotation(frameMetadata.rotation)
                    .build()
            requestDetectInImage(mlImage, graphicOverlay, bitmap, /* shouldShowFps= */ true, frameStartMs)
                .addOnSuccessListener(executor) { processLatestImage(graphicOverlay) }

            // This is optional. Java Garbage collection can also close it eventually.
            mlImage.close()
            return
        }

        requestDetectInImage(
            InputImage.fromByteBuffer(
                data,
                frameMetadata.width,
                frameMetadata.height,
                frameMetadata.rotation,
                InputImage.IMAGE_FORMAT_NV21
            ),
            graphicOverlay,
            bitmap,
            /* shouldShowFps= */ true,
            frameStartMs
        )
            .addOnSuccessListener(executor) { processLatestImage(graphicOverlay) }
    }

    // -----------------Code for processing live preview frame from CameraX API-----------------------
    @RequiresApi(VERSION_CODES.LOLLIPOP)
    @ExperimentalGetImage
    override fun processImageProxy(image: ImageProxy, graphicOverlay: GraphicOverlay) {
        val frameStartMs = SystemClock.elapsedRealtime()
        if (isShutdown) {
            return
        }
        var bitmap: Bitmap? = null
        if (!PreferenceUtils.isCameraLiveViewportEnabled(graphicOverlay.context)) {
            bitmap = BitmapUtils.getBitmap(image)
        }

        if (isMlImageEnabled(graphicOverlay.context)) {
            val mlImage =
                MediaMlImageBuilder(image.image!!).setRotation(image.imageInfo.rotationDegrees).build()
            requestDetectInImage(
                mlImage,
                graphicOverlay,
                /* originalCameraImage= */ bitmap,
                /* shouldShowFps= */ true,
                frameStartMs
            )
                // When the image is from CameraX analysis use case, must call image.close() on received
                // images when finished using them. Otherwise, new images may not be received or the camera
                // may stall.
                // Currently MlImage doesn't support ImageProxy directly, so we still need to call
                // ImageProxy.close() here.
                .addOnCompleteListener { image.close() }

            return
        }

        requestDetectInImage(
            InputImage.fromMediaImage(image.image!!, image.imageInfo.rotationDegrees),
            graphicOverlay,
            /* originalCameraImage= */ bitmap,
            /* shouldShowFps= */ true,
            frameStartMs
        )
            // When the image is from CameraX analysis use case, must call image.close() on received
            // images when finished using them. Otherwise, new images may not be received or the camera
            // may stall.
            .addOnCompleteListener { image.close() }
    }

    // -----------------Common processing logic-------------------------------------------------------
    private fun requestDetectInImage(
        image: InputImage,
        graphicOverlay: GraphicOverlay,
        originalCameraImage: Bitmap?,
        shouldShowFps: Boolean,
        frameStartMs: Long
    ): Task<T> {
        return setUpListener(
            detectInImage(image),
            graphicOverlay,
            originalCameraImage,
            shouldShowFps,
            frameStartMs
        )
    }

    private fun requestDetectInImage(
        image: MlImage,
        graphicOverlay: GraphicOverlay,
        originalCameraImage: Bitmap?,
        shouldShowFps: Boolean,
        frameStartMs: Long
    ): Task<T> {
        return setUpListener(
            detectInImage(image),
            graphicOverlay,
            originalCameraImage,
            shouldShowFps,
            frameStartMs
        )
    }

    // Attaches the success/failure listeners that track latency stats and drive the overlay.
    private fun setUpListener(
        task: Task<T>,
        graphicOverlay: GraphicOverlay,
        originalCameraImage: Bitmap?,
        shouldShowFps: Boolean,
        frameStartMs: Long
    ): Task<T> {
        val detectorStartMs = SystemClock.elapsedRealtime()
        return task
            .addOnSuccessListener(
                executor,
                OnSuccessListener { results: T ->
                    val endMs = SystemClock.elapsedRealtime()
                    val currentFrameLatencyMs = endMs - frameStartMs
                    val currentDetectorLatencyMs = endMs - detectorStartMs
                    // Reset stats every 500 runs so averages track recent behavior.
                    if (numRuns >= 500) {
                        resetLatencyStats()
                    }
                    numRuns++
                    frameProcessedInOneSecondInterval++
                    totalFrameMs += currentFrameLatencyMs
                    maxFrameMs = max(currentFrameLatencyMs, maxFrameMs)
                    minFrameMs = min(currentFrameLatencyMs, minFrameMs)
                    totalDetectorMs += currentDetectorLatencyMs
                    maxDetectorMs = max(currentDetectorLatencyMs, maxDetectorMs)
                    minDetectorMs = min(currentDetectorLatencyMs, minDetectorMs)

                    // Only log inference info once per second. When frameProcessedInOneSecondInterval is
                    // equal to 1, it means this is the first frame processed during the current second.
                    if (frameProcessedInOneSecondInterval == 1) {
                        Log.d(TAG, "Num of Runs: $numRuns")
                        Log.d(
                            TAG,
                            "Frame latency: max=" +
                                maxFrameMs +
                                ", min=" +
                                minFrameMs +
                                ", avg=" +
                                totalFrameMs / numRuns
                        )
                        Log.d(
                            TAG,
                            "Detector latency: max=" +
                                maxDetectorMs +
                                ", min=" +
                                minDetectorMs +
                                ", avg=" +
                                totalDetectorMs / numRuns
                        )
                        val mi = ActivityManager.MemoryInfo()
                        activityManager.getMemoryInfo(mi)
                        val availableMegs: Long = mi.availMem / 0x100000L
                        Log.d(TAG, "Memory available in system: $availableMegs MB")
                    }
                    graphicOverlay.clear()
                    if (originalCameraImage != null) {
                        graphicOverlay.add(CameraImageGraphic(graphicOverlay, originalCameraImage))
                    }
                    this@VisionProcessorBase.onSuccess(results, graphicOverlay)
                    if (!PreferenceUtils.shouldHideDetectionInfo(graphicOverlay.context)) {
                        graphicOverlay.add(
                            InferenceInfoGraphic(
                                graphicOverlay,
                                currentFrameLatencyMs,
                                currentDetectorLatencyMs,
                                if (shouldShowFps) framesPerSecond else null
                            )
                        )
                    }
                    graphicOverlay.postInvalidate()
                }
            )
            .addOnFailureListener(
                executor,
                OnFailureListener { e: Exception ->
                    graphicOverlay.clear()
                    graphicOverlay.postInvalidate()
                    val error = "Failed to process. Error: " + e.localizedMessage
                    Toast.makeText(
                        graphicOverlay.context,
                        """
          $error
          Cause: ${e.cause}
          """.trimIndent(),
                        Toast.LENGTH_SHORT
                    )
                        .show()
                    Log.d(TAG, error)
                    e.printStackTrace()
                    this@VisionProcessorBase.onFailure(e)
                }
            )
    }

    override fun stop() {
        executor.shutdown()
        isShutdown = true
        resetLatencyStats()
        fpsTimer.cancel()
    }

    private fun resetLatencyStats() {
        numRuns = 0
        totalFrameMs = 0
        maxFrameMs = 0
        minFrameMs = Long.MAX_VALUE
        totalDetectorMs = 0
        maxDetectorMs = 0
        minDetectorMs = Long.MAX_VALUE
    }

    protected abstract fun detectInImage(image: InputImage): Task<T>

    // Default: MlImage path not supported; subclasses override to opt in.
    protected open fun detectInImage(image: MlImage): Task<T> {
        return Tasks.forException(
            MlKitException(
                "MlImage is currently not demonstrated for this feature",
                MlKitException.INVALID_ARGUMENT
            )
        )
    }

    protected abstract fun onSuccess(results: T, graphicOverlay: GraphicOverlay)

    protected abstract fun onFailure(e: Exception)

    protected open fun isMlImageEnabled(context: Context?): Boolean {
        return false
    }
}
+++ b/app/src/main/java/com/assimilate/alltrans/curview/GraphicOverlay.java @@ -0,0 +1,317 @@ +/* + * Copyright 2020 Google LLC. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.assimilate.alltrans.curview; + +import static java.lang.Math.max; +import static java.lang.Math.min; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.util.AttributeSet; +import android.view.View; + +import com.google.common.base.Preconditions; +import com.google.common.primitives.Ints; + +import java.util.ArrayList; +import java.util.List; + +/** + * A view which renders a series of custom graphics to be overlayed on top of an associated preview + * (i.e., the camera preview). The creator can add graphics objects, update the objects, and remove + * them, triggering the appropriate drawing and invalidation within the view. + * + *

Supports scaling and mirroring of the graphics relative the camera's preview properties. The + * idea is that detection items are expressed in terms of an image size, but need to be scaled up to + * the full view size, and also mirrored in the case of the front-facing camera. + * + *

Associated {@link Graphic} items should use the following methods to convert to view + * coordinates for the graphics that are drawn: + * + *

    + *
  1. {@link Graphic#scale(float)} adjusts the size of the supplied value from the image scale to + * the view scale. + *
  2. {@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the + * coordinate from the image's coordinate system to the view coordinate system. + *
+ */ +public class GraphicOverlay extends View { + private final Object lock = new Object(); + private final List graphics = new ArrayList<>(); + // Matrix for transforming from image coordinates to overlay view coordinates. + private final Matrix transformationMatrix = new Matrix(); + + private int imageWidth; + private int imageHeight; + // The factor of overlay View size to image size. Anything in the image coordinates need to be + // scaled by this amount to fit with the area of overlay View. + private float scaleFactor = 1.0f; + // The number of horizontal pixels needed to be cropped on each side to fit the image with the + // area of overlay View after scaling. + private float postScaleWidthOffset; + // The number of vertical pixels needed to be cropped on each side to fit the image with the + // area of overlay View after scaling. + private float postScaleHeightOffset; + private boolean isImageFlipped; + private boolean needUpdateTransformation = true; + + /** + * Base class for a custom graphics object to be rendered within the graphic overlay. Subclass + * this and implement the {@link Graphic#draw(Canvas)} method to define the graphics element. Add + * instances to the overlay using {@link GraphicOverlay#add(Graphic)}. + */ + public abstract static class Graphic { + private GraphicOverlay overlay; + + public Graphic(GraphicOverlay overlay) { + this.overlay = overlay; + } + + /** + * Draw the graphic on the supplied canvas. Drawing should use the following methods to convert + * to view coordinates for the graphics that are drawn: + * + *
    + *
  1. {@link Graphic#scale(float)} adjusts the size of the supplied value from the image + * scale to the view scale. + *
  2. {@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the + * coordinate from the image's coordinate system to the view coordinate system. + *
+ * + * @param canvas drawing canvas + */ + public abstract void draw(Canvas canvas); + + protected void drawRect( + Canvas canvas, float left, float top, float right, float bottom, Paint paint) { + canvas.drawRect(left, top, right, bottom, paint); + } + + protected void drawText(Canvas canvas, String text, float x, float y, Paint paint) { + canvas.drawText(text, x, y, paint); + } + + /** Adjusts the supplied value from the image scale to the view scale. */ + public float scale(float imagePixel) { + return imagePixel * overlay.scaleFactor; + } + + /** Returns the application context of the app. */ + public Context getApplicationContext() { + return overlay.getContext().getApplicationContext(); + } + + public boolean isImageFlipped() { + return overlay.isImageFlipped; + } + + /** + * Adjusts the x coordinate from the image's coordinate system to the view coordinate system. + */ + public float translateX(float x) { + if (overlay.isImageFlipped) { + return overlay.getWidth() - (scale(x) - overlay.postScaleWidthOffset); + } else { + return scale(x) - overlay.postScaleWidthOffset; + } + } + + /** + * Adjusts the y coordinate from the image's coordinate system to the view coordinate system. + */ + public float translateY(float y) { + return scale(y) - overlay.postScaleHeightOffset; + } + + /** + * Returns a {@link Matrix} for transforming from image coordinates to overlay view coordinates. + */ + public Matrix getTransformationMatrix() { + return overlay.transformationMatrix; + } + + public void postInvalidate() { + overlay.postInvalidate(); + } + + /** + * Given the {@code zInImagePixel}, update the color for the passed in {@code paint}. The color will be + * more red if the {@code zInImagePixel} is smaller, or more blue ish vice versa. This is + * useful to visualize the z value of landmarks via color for features like Pose and Face Mesh. 
+ * + * @param paint the paint to update color with + * @param canvas the canvas used to draw with paint + * @param visualizeZ if true, paint color will be changed. + * @param rescaleZForVisualization if true, re-scale the z value with zMin and zMax to make + * color more distinguishable + * @param zInImagePixel the z value used to update the paint color + * @param zMin min value of all z values going to be passed in + * @param zMax max value of all z values going to be passed in + */ + public void updatePaintColorByZValue( + Paint paint, + Canvas canvas, + boolean visualizeZ, + boolean rescaleZForVisualization, + float zInImagePixel, + float zMin, + float zMax) { + if (!visualizeZ) { + return; + } + + // When visualizeZ is true, sets up the paint to different colors based on z values. + // Gets the range of z value. + float zLowerBoundInScreenPixel; + float zUpperBoundInScreenPixel; + + if (rescaleZForVisualization) { + zLowerBoundInScreenPixel = min(-0.001f, scale(zMin)); + zUpperBoundInScreenPixel = max(0.001f, scale(zMax)); + } else { + // By default, assume the range of z value in screen pixel is [-canvasWidth, canvasWidth]. + float defaultRangeFactor = 1f; + zLowerBoundInScreenPixel = -defaultRangeFactor * canvas.getWidth(); + zUpperBoundInScreenPixel = defaultRangeFactor * canvas.getWidth(); + } + + float zInScreenPixel = scale(zInImagePixel); + + if (zInScreenPixel < 0) { + // Sets up the paint to be red if the item is in front of the z origin. + // Maps values within [zLowerBoundInScreenPixel, 0) to [255, 0) and use it to control the + // color. The larger the value is, the more red it will be. + int v = (int) (zInScreenPixel / zLowerBoundInScreenPixel * 255); + v = Ints.constrainToRange(v, 0, 255); + paint.setARGB(255, 255, 255 - v, 255 - v); + } else { + // Sets up the paint to be blue if the item is behind the z origin. + // Maps values within [0, zUpperBoundInScreenPixel] to [0, 255] and use it to control the + // color. 
The larger the value is, the more blue it will be. + int v = (int) (zInScreenPixel / zUpperBoundInScreenPixel * 255); + v = Ints.constrainToRange(v, 0, 255); + paint.setARGB(255, 255 - v, 255 - v, 255); + } + } + } + + public GraphicOverlay(Context context, AttributeSet attrs) { + super(context, attrs); + addOnLayoutChangeListener( + (view, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) -> + needUpdateTransformation = true); + } + + /** Removes all graphics from the overlay. */ + public void clear() { + synchronized (lock) { + graphics.clear(); + } + postInvalidate(); + } + + /** Adds a graphic to the overlay. */ + public void add(Graphic graphic) { + synchronized (lock) { + graphics.add(graphic); + } + } + + /** Removes a graphic from the overlay. */ + public void remove(Graphic graphic) { + synchronized (lock) { + graphics.remove(graphic); + } + postInvalidate(); + } + + /** + * Sets the source information of the image being processed by detectors, including size and + * whether it is flipped, which informs how to transform image coordinates later. + * + * @param imageWidth the width of the image sent to ML Kit detectors + * @param imageHeight the height of the image sent to ML Kit detectors + * @param isFlipped whether the image is flipped. Should set it to true when the image is from the + * front camera. 
+ */ + public void setImageSourceInfo(int imageWidth, int imageHeight, boolean isFlipped) { + Preconditions.checkState(imageWidth > 0, "image width must be positive"); + Preconditions.checkState(imageHeight > 0, "image height must be positive"); + synchronized (lock) { + this.imageWidth = imageWidth; + this.imageHeight = imageHeight; + this.isImageFlipped = isFlipped; + needUpdateTransformation = true; + } + postInvalidate(); + } + + public int getImageWidth() { + return imageWidth; + } + + public int getImageHeight() { + return imageHeight; + } + + private void updateTransformationIfNeeded() { + if (!needUpdateTransformation || imageWidth <= 0 || imageHeight <= 0) { + return; + } + float viewAspectRatio = (float) getWidth() / getHeight(); + float imageAspectRatio = (float) imageWidth / imageHeight; + postScaleWidthOffset = 0; + postScaleHeightOffset = 0; + if (viewAspectRatio > imageAspectRatio) { + // The image needs to be vertically cropped to be displayed in this view. + scaleFactor = (float) getWidth() / imageWidth; + postScaleHeightOffset = ((float) getWidth() / imageAspectRatio - getHeight()) / 2; + } else { + // The image needs to be horizontally cropped to be displayed in this view. + scaleFactor = (float) getHeight() / imageHeight; + postScaleWidthOffset = ((float) getHeight() * imageAspectRatio - getWidth()) / 2; + } + + transformationMatrix.reset(); + transformationMatrix.setScale(scaleFactor, scaleFactor); + transformationMatrix.postTranslate(-postScaleWidthOffset, -postScaleHeightOffset); + + if (isImageFlipped) { + transformationMatrix.postScale(-1f, 1f, getWidth() / 2f, getHeight() / 2f); + } + + needUpdateTransformation = false; + } + + /** Draws the overlay with its associated graphic objects. 
*/ + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + synchronized (lock) { + updateTransformationIfNeeded(); + + for (Graphic graphic : graphics) { + graphic.draw(canvas); + } + } + } + + +} diff --git a/app/src/main/java/com/assimilate/alltrans/fragments/TranslateCameraFragment.java b/app/src/main/java/com/assimilate/alltrans/fragments/TranslateCameraFragment.java new file mode 100644 index 0000000..eea57f2 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/fragments/TranslateCameraFragment.java @@ -0,0 +1,297 @@ +package com.assimilate.alltrans.fragments; + +import androidx.fragment.app.Fragment; + +public class TranslateCameraFragment extends Fragment { + +// private ActivityResultLauncher activityLauncher; +// private ActivityResultLauncher permissionLauncher; +// +// private FragmentTranslateCameraBinding mBinding; +// +// private TextToSpeech tts; +// +// private boolean translating = false; +// private boolean adLoading = false; // 广告是否处于加载中 +// private boolean collectCurrent = false; +// private String sourceText = ""; // 可能会有一种屌毛,翻译完成后,先去输入框删几个字符,然后再去点击收藏按钮。所以每次翻译前备份一下 +// +// @Override +// public void onCreate(@Nullable Bundle savedInstanceState) { +// super.onCreate(savedInstanceState); +// activityLauncher = registerForActivityResult(new ActivityResultContracts.StartActivityForResult(), new ActivityResultCallback() { +// @Override +// public void onActivityResult(ActivityResult result) { +// if (Activity.RESULT_OK == result.getResultCode() && null != result.getData()) { +// Intent data = result.getData(); +// String recognizedText = data.getStringExtra("recognizedText"); +// if (!TextUtils.isEmpty(recognizedText)) { +// mBinding.inputText.setText(recognizedText); +// translate(recognizedText); +// } +// } +// } +// }); +// permissionLauncher = registerForActivityResult(new ActivityResultContracts.RequestPermission(), new ActivityResultCallback() { +// @Override +// public void onActivityResult(Boolean result) { 
+// if (result.booleanValue()) { +// launchCameraApi(); +// } else { +// Widget.makeToast(getActivity(), "permission denied"); +// } +// } +// }); +// +// translating = false; +// adLoading = false; +// collectCurrent = false; +// +// tts = new TextToSpeech(getActivity(), new TextToSpeech.OnInitListener() { +// @Override +// public void onInit(int status) { +// if (null != tts && TextToSpeech.SUCCESS == status) +// tts.setLanguage(Locale.getDefault()); +// } +// }); +// } +// +// @Override +// public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { +// mBinding = FragmentTranslateCameraBinding.inflate(getLayoutInflater()); +// +// // 朗读原文 +// mBinding.speakSource.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// String speech = mBinding.result.getText().toString().trim(); +// if (!TextUtils.isEmpty(speech)) { +// if (null != tts +// && TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { +// tts.speak(speech, 0, null, null); +// } +// } +// } +// }); +// // 清空翻译输入框与结果文本 +// mBinding.clear.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// mBinding.result.setText(""); +// mBinding.inputText.setText(""); +// +// reset(); +// } +// }); +// // 朗读译文 +// mBinding.speakTarget.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// String speech = mBinding.result.getText().toString().trim(); +// if (!translating && !TextUtils.isEmpty(speech)) { +// if (null != tts +// && TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { +// tts.speak(speech, 0, null, null); +// } +// } +// } +// }); +// // 分享译文 +// mBinding.shareTrans.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// if (translating) { +// Widget.makeToast(getActivity(), "Translating..."); +// return; +// } +// +// final String 
share = mBinding.result.getText().toString().trim(); +// if (!TextUtils.isEmpty(share)) { +// Intent intent = new Intent(Intent.ACTION_SEND); +// intent.setType("text/plain"); +// intent.putExtra(Intent.EXTRA_TEXT, share); +// startActivity(Intent.createChooser(intent, "Share " + getString(R.string.app_name))); +// } +// } +// }); +// // 复制到粘贴板 +// mBinding.copyTrans.setOnClickListener(new View.OnClickListener() { +// private final String tip = "Copied to clipboard!"; +// @Override +// public void onClick(View v) { +// final String share = mBinding.result.getText().toString().trim(); +// if (!translating && !TextUtils.isEmpty(share)) { +// ClipboardManager clipboardManager = (ClipboardManager) getActivity().getSystemService(Context.CLIPBOARD_SERVICE); +// ClipData clipData = ClipData.newPlainText("targetValue", share); +// clipboardManager.setPrimaryClip(clipData); +// Widget.makeToast(getActivity(), tip); +// } +// } +// }); +// // 收藏按钮:翻译中禁止收藏, 无原文禁止收藏,无译文禁止收藏 +// mBinding.collectTrans.setOnClickListener(new View.OnClickListener() { +// private DbTranslation dbTranslation; +// +// @Override +// public void onClick(View v) { +// if (translating) return; +// +// final String sourceTxt = sourceText; +// if (TextUtils.isEmpty(sourceTxt)) return; +// +// if (collectCurrent) { +// collectCurrent = false; +// getDbTranslation(getActivity()).collectJust(false); +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collecttrans); +// } else { +// collectCurrent = true; +// getDbTranslation(getActivity()).collectJust(true); +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collectedtrans); +// } +// } +// +// private DbTranslation getDbTranslation(Context context) { +// if (null == dbTranslation) { +// dbTranslation = new DbTranslation(context); +// } +// return dbTranslation; +// } +// }); +// +// mBinding.changeLanguage.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// launchLanguageSet(); +// } +// }); +// 
+// return mBinding.getRoot(); +// } +// +// @Override +// public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { +// super.onViewCreated(view, savedInstanceState); +// } +// +// @Override +// public void onResume() { +// super.onResume(); +// +// // 每次回来可能会更新 +// mBinding.languageSource.setText(TranslateWordApp.getSourceLanguage()); +// mBinding.languageTarget.setText(TranslateWordApp.getTargetLanguage()); +// mBinding.sourceLanguage2.setText(TranslateWordApp.getSourceLanguage()); +// mBinding.targetLanguage2.setText(TranslateWordApp.getTargetLanguage()); +// +// // TODO: 可以判断是否需要再次请求原生广告 +// } +// +// @Override +// public void onStop() { +// super.onStop(); +// +// if (null != tts) tts.stop(); +// } +// +// @Override +// public void onDestroy() { +// super.onDestroy(); +// if (null != tts) tts.shutdown(); +// } +// +// public void launchCamera() { +// if (withCameraPermission()) { +// launchCameraApi(); +// } else { +// if (null != permissionLauncher) { +// permissionLauncher.launch(Manifest.permission.CAMERA); +// } +// } +// } +// +// private void launchCameraApi() { +// if (null != activityLauncher) { +// activityLauncher.launch(new Intent(getActivity(), TranslateCameraActivity.class)); +// } +// } +// +// private void translate(@NonNull final String text) { +// // step1. 
叫用户检查网络连接 +// if (translating) { +// // 第一次点击翻译按钮后 可能会延迟响应结果,翻译期间再次点击翻译按钮无效 +// Logger.d("log", "translating(not post data)..."); +// return; +// } +// Logger.d("log", "translating..."); +// reset(); +// +// translating = true; +// final HashMap param = new HashMap<>(); +// param.put("sourceLanguage", TranslateWordApp.getSourceLanguageCode()); +// param.put("translationLanguage", TranslateWordApp.getTargetLanguageCode()); +// param.put("text", text); +// +// sourceText = text; +// mBinding.result.setText("translating..."); +// Translator translator = new GoogleTranslator(); +// translator.translate(param, new GoogleTranslator.GoogleTranslateCallback() { +// @Override +// public void onResponse(String val) { +// translating = false; +// +// if (!TextUtils.isEmpty(val)) { +// TranslateMainActivity activity = null; +// if (getActivity() instanceof TranslateMainActivity) { +// activity = (TranslateMainActivity) getActivity(); +// } +// if (null != activity) { +// activity.runOnUiThread(new Runnable() { +// @Override +// public void run() { +// mBinding.result.setText(val); +// addLog(val); +// } +// }); +// } +// } +// } +// +// private void addLog(String targetTxt) { +// FragmentActivity activity = getActivity(); +// if (null != activity) { +// DbTranslation dbTranslation = new DbTranslation(activity); +// Translations translations = new Translations(TranslateWordApp.getSourceLanguage(), sourceText, TranslateWordApp.getTargetLanguage(), targetTxt); +// dbTranslation.addTranslation(translations); +// } +// } +// }); +// } +// +// private void reset() { +// // 归位收藏图片 +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collecttrans); +// // 归位收藏备份文本 +// sourceText = ""; +// // 设置当前未处于收藏状态 +// collectCurrent = false; +// // 当前不处于翻译状态 +// translating = false; +// } +// +// private boolean withCameraPermission() { +// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { +// FragmentActivity activity = getActivity(); +// if (null == activity) return false; +// return 
ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED; +// } else { +// return true; +// } +// } +// +// private void launchLanguageSet() { +// Intent intent = new Intent(getActivity(), TranslateChangeLanguageActivity.class); +// getActivity().startActivity(intent); +// } +// +// +} \ No newline at end of file diff --git a/app/src/main/java/com/assimilate/alltrans/fragments/TranslateTextFragment.java b/app/src/main/java/com/assimilate/alltrans/fragments/TranslateTextFragment.java new file mode 100644 index 0000000..029beb1 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/fragments/TranslateTextFragment.java @@ -0,0 +1,280 @@ +package com.assimilate.alltrans.fragments; + +import androidx.fragment.app.Fragment; + +public class TranslateTextFragment extends Fragment { + +// private FragmentTranslateTextBinding mBinding; +// +// private TextToSpeech tts; +// +// private boolean translating = false; +// private boolean adLoading = false; // 广告是否处于加载中 +// private boolean collectCurrent = false; +// +// private String sourceText = ""; // 可能会有一种屌毛,翻译完成后,先去输入框删几个字符,然后再去点击收藏按钮。所以每次翻译前备份一下 +// +// @Override +// public void onCreate(@Nullable Bundle savedInstanceState) { +// super.onCreate(savedInstanceState); +// +// translating = false; +// adLoading = false; +// collectCurrent = false; +// +// tts = new TextToSpeech(getActivity(), new TextToSpeech.OnInitListener() { +// @Override +// public void onInit(int status) { +// if (null != tts && TextToSpeech.SUCCESS == status) +// tts.setLanguage(Locale.getDefault()); +// } +// }); +// } +// +// @SuppressLint("ClickableViewAccessibility") +// @Override +// public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { +// mBinding = FragmentTranslateTextBinding.inflate(getLayoutInflater()); +// +// // 朗读原文 +// mBinding.speakSource.setOnClickListener(new View.OnClickListener() { +// @Override +// public void 
onClick(View v) { +// String speech = mBinding.result.getText().toString().trim(); +// if (!TextUtils.isEmpty(speech)) { +// if (null != tts +// && TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { +// tts.speak(speech, 0, null, null); +// } +// } +// } +// }); +// // 清空翻译输入框与结果文本 +// mBinding.clear.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// mBinding.result.setText(""); +// mBinding.inputText.setText(""); +// +// reset(); +// } +// }); +// // 输入enter键后直接翻译 & 关闭软件盘与失去光标闪烁 +// mBinding.inputText.setOnTouchListener((view1, motionEvent) -> { +// mBinding.inputText.setCursorVisible(true); +// return false; +// }); +// mBinding.inputText.setOnEditorActionListener(new TextView.OnEditorActionListener() { +// @Override +// public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) { +// if (i == EditorInfo.IME_ACTION_SEARCH) { +// hiddenSoftKeyboard(); +// final String text = mBinding.inputText.getText().toString(); +// if (!TextUtils.isEmpty(text)) { +// translate(text); +// } +// return true; +// } +// return false; +// } +// +// private void hiddenSoftKeyboard() { +// final FragmentActivity activity = getActivity(); +// final IBinder binder = mBinding.inputText.getWindowToken(); +// if (null != activity && null != binder) { +// InputMethodManager inputMethodManager = +// (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE); +// boolean hideSoftInputFromWindow = +// inputMethodManager.hideSoftInputFromWindow(binder, InputMethodManager.HIDE_NOT_ALWAYS); +// if (hideSoftInputFromWindow) { +// mBinding.inputText.setCursorVisible(false); +// } +// } +// } +// }); +// +// // 朗读译文 +// mBinding.speakTarget.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// String speech = mBinding.result.getText().toString().trim(); +// if (!translating && !TextUtils.isEmpty(speech)) { +// if (null != tts +// && 
TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { +// tts.speak(speech, 0, null, null); +// } +// } +// } +// }); +// // 分享译文 +// mBinding.shareTrans.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// if (translating) { +// Widget.makeToast(getActivity(), "Translating..."); +// return; +// } +// +// final String share = mBinding.result.getText().toString().trim(); +// if (!TextUtils.isEmpty(share)) { +// Intent intent = new Intent(Intent.ACTION_SEND); +// intent.setType("text/plain"); +// intent.putExtra(Intent.EXTRA_TEXT, share); +// startActivity(Intent.createChooser(intent, "Share " + getString(R.string.app_name))); +// } +// } +// }); +// // 复制到粘贴板 +// mBinding.copyTrans.setOnClickListener(new View.OnClickListener() { +// private final String tip = "Copied to clipboard!"; +// @Override +// public void onClick(View v) { +// final String share = mBinding.result.getText().toString().trim(); +// if (!translating && !TextUtils.isEmpty(share)) { +// ClipboardManager clipboardManager = (ClipboardManager) getActivity().getSystemService(Context.CLIPBOARD_SERVICE); +// ClipData clipData = ClipData.newPlainText("targetValue", share); +// clipboardManager.setPrimaryClip(clipData); +// Widget.makeToast(getActivity(), tip); +// } +// } +// }); +// // 收藏按钮:翻译中禁止收藏, 无原文禁止收藏,无译文禁止收藏 +// mBinding.collectTrans.setOnClickListener(new View.OnClickListener() { +// private DbTranslation dbTranslation; +// +// @Override +// public void onClick(View v) { +// if (translating) return; +// +// final String sourceTxt = sourceText; +// if (TextUtils.isEmpty(sourceTxt)) return; +// +// if (collectCurrent) { +// collectCurrent = false; +// getDbTranslation(getActivity()).collectJust(false); +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collecttrans); +// } else { +// collectCurrent = true; +// getDbTranslation(getActivity()).collectJust(true); +// 
mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collectedtrans); +// } +// } +// +// private DbTranslation getDbTranslation(Context context) { +// if (null == dbTranslation) { +// dbTranslation = new DbTranslation(context); +// } +// return dbTranslation; +// } +// }); +// +// mBinding.changeLanguage.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// launchLanguageSet(); +// } +// }); +// +// return mBinding.getRoot(); +// } +// +// @Override +// public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { +// super.onViewCreated(view, savedInstanceState); +// } +// +// @Override +// public void onResume() { +// super.onResume(); +// +// // 每次回来可能会更新 +// mBinding.languageSource.setText(TranslateWordApp.getSourceLanguage()); +// mBinding.languageTarget.setText(TranslateWordApp.getTargetLanguage()); +// mBinding.sourceLanguage2.setText(TranslateWordApp.getSourceLanguage()); +// mBinding.targetLanguage2.setText(TranslateWordApp.getTargetLanguage()); +// +// // TODO: 可以判断是否需要再次请求原生广告 +// } +// +// @Override +// public void onStop() { +// super.onStop(); +// if (null != tts) tts.stop(); +// } +// +// @Override +// public void onDestroy() { +// super.onDestroy(); +// if (null != tts) tts.shutdown(); +// } +// +// private void translate(@NonNull final String text) { +// // step1. 
叫用户检查网络连接 +// if (translating) { +// // 第一次点击翻译按钮后 可能会延迟响应结果,翻译期间再次点击翻译按钮无效 +// Logger.d("log", "translating(not post data)..."); +// return; +// } +// Logger.d("log", "translating..."); +// reset(); +// +// translating = true; +// final HashMap param = new HashMap<>(); +// param.put("sourceLanguage", TranslateWordApp.getSourceLanguageCode()); +// param.put("translationLanguage", TranslateWordApp.getTargetLanguageCode()); +// param.put("text", text); +// +// sourceText = text; +// mBinding.result.setText("translating..."); +// Translator translator = new GoogleTranslator(); +// translator.translate(param, new GoogleTranslator.GoogleTranslateCallback() { +// @Override +// public void onResponse(String val) { +// translating = false; +// +// if (!TextUtils.isEmpty(val)) { +// TranslateMainActivity activity = null; +// if (getActivity() instanceof TranslateMainActivity) { +// activity = (TranslateMainActivity) getActivity(); +// } +// if (null != activity) { +// activity.runOnUiThread(new Runnable() { +// @Override +// public void run() { +// mBinding.result.setText(val); +// addLog(val); +// } +// }); +// } +// } +// } +// +// private void addLog(String targetTxt) { +// FragmentActivity activity = getActivity(); +// if (null != activity) { +// DbTranslation dbTranslation = new DbTranslation(activity); +// Translations translations = new Translations(TranslateWordApp.getSourceLanguage(), sourceText, TranslateWordApp.getTargetLanguage(), targetTxt); +// dbTranslation.addTranslation(translations); +// } +// } +// }); +// } +// +// private void reset() { +// // 归位收藏图片 +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collecttrans); +// // 归位收藏备份文本 +// sourceText = ""; +// // 设置当前未处于收藏状态 +// collectCurrent = false; +// // 当前不处于翻译状态 +// translating = false; +// } +// +// private void launchLanguageSet() { +// Intent intent = new Intent(getActivity(), TranslateChangeLanguageActivity.class); +// getActivity().startActivity(intent); +// } +// +// +} \ No newline at 
end of file diff --git a/app/src/main/java/com/assimilate/alltrans/fragments/TranslateVoiceFragment.java b/app/src/main/java/com/assimilate/alltrans/fragments/TranslateVoiceFragment.java new file mode 100644 index 0000000..46f5da9 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/fragments/TranslateVoiceFragment.java @@ -0,0 +1,285 @@ +package com.assimilate.alltrans.fragments; + + +import androidx.fragment.app.Fragment; + +// https://play.google.com/store/apps/details?id=com.google.android.tts +public class TranslateVoiceFragment extends Fragment { + +// private FragmentTranslateVoiceBinding mBinding; +// private ActivityResultLauncher launcher; +// +// private TextToSpeech tts; +// +// private boolean translating = false; +// private boolean adLoading = false; // 广告是否处于加载中 +// private boolean collectCurrent = false; +// +// private String sourceText = ""; // 可能会有一种屌毛,翻译完成后,先去输入框删几个字符,然后再去点击收藏按钮。所以每次翻译前备份一下 +// +// @Override +// public void onCreate(@Nullable Bundle savedInstanceState) { +// super.onCreate(savedInstanceState); +// +// translating = false; +// adLoading = false; +// collectCurrent = false; +// +// tts = new TextToSpeech(getActivity(), new TextToSpeech.OnInitListener() { +// @Override +// public void onInit(int status) { +// if (null != tts && TextToSpeech.SUCCESS == status) +// tts.setLanguage(Locale.getDefault()); +// } +// }); +// +// launcher = registerForActivityResult(new ActivityResultContracts.StartActivityForResult(), new ActivityResultCallback() { +// @Override +// public void onActivityResult(ActivityResult result) { +// Intent data = result.getData(); +// if (Activity.RESULT_OK == result.getResultCode() && null != data) { +// String speech = data.getStringArrayListExtra("android.speech.extra.RESULTS").get(0); +// if (!TextUtils.isEmpty(speech)) { +// mBinding.inputText.setText(speech); +// translate(speech); +// } +// } +// } +// }); +// } +// +// @Override +// public View onCreateView(LayoutInflater inflater, ViewGroup 
container, Bundle savedInstanceState) { +// mBinding = FragmentTranslateVoiceBinding.inflate(getLayoutInflater()); +// +// // 朗读原文 +// mBinding.speakSource.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// String speech = mBinding.result.getText().toString().trim(); +// if (!TextUtils.isEmpty(speech)) { +// if (null != tts +// && TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { +// tts.speak(speech, 0, null, null); +// } +// } +// } +// }); +// // 清空翻译输入框与结果文本 +// mBinding.clear.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// mBinding.result.setText(""); +// mBinding.inputText.setText(""); +// +// reset(); +// } +// }); +// // 说话完成后,自动翻译 +// mBinding.startSpeak.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// Intent speech_intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); +// speech_intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 5000); // 设置5秒的静默时间 +// speech_intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 5000); // 设置5秒的可能完全静默时间 +// +// speech_intent.putExtra("android.speech.extra.LANGUAGE_MODEL", TranslateWordApp.getSourceLanguage()); +// speech_intent.putExtra("android.speech.extra.LANGUAGE", TranslateWordApp.getSourceLanguageCode()); +// speech_intent.putExtra("android.speech.extra.LANGUAGE_PREFERENCE", TranslateWordApp.getSourceLanguage()); +// try { +// if (null != launcher) launcher.launch(speech_intent); +// } catch (ActivityNotFoundException ea) { +// Widget.makeToast(getActivity(), "Something went wrong."); +// } +// } +// }); +// +// // 朗读译文 +// mBinding.speakTarget.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// String speech = mBinding.result.getText().toString().trim(); +// if (!translating && !TextUtils.isEmpty(speech)) { 
+// if (null != tts +// && TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { +// tts.speak(speech, 0, null, null); +// } +// } +// } +// }); +// // 分享译文 +// mBinding.shareTrans.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// if (translating) { +// Widget.makeToast(getActivity(), "Translating..."); +// return; +// } +// +// final String share = mBinding.result.getText().toString().trim(); +// if (!TextUtils.isEmpty(share)) { +// Intent intent = new Intent(Intent.ACTION_SEND); +// intent.setType("text/plain"); +// intent.putExtra(Intent.EXTRA_TEXT, share); +// startActivity(Intent.createChooser(intent, "Share " + getString(R.string.app_name))); +// } +// } +// }); +// // 复制到粘贴板 +// mBinding.copyTrans.setOnClickListener(new View.OnClickListener() { +// private final String tip = "Copied to clipboard!"; +// @Override +// public void onClick(View v) { +// final String share = mBinding.result.getText().toString().trim(); +// if (!translating && !TextUtils.isEmpty(share)) { +// ClipboardManager clipboardManager = (ClipboardManager) getActivity().getSystemService(Context.CLIPBOARD_SERVICE); +// ClipData clipData = ClipData.newPlainText("targetValue", share); +// clipboardManager.setPrimaryClip(clipData); +// Widget.makeToast(getActivity(), tip); +// } +// } +// }); +// // 收藏按钮:翻译中禁止收藏, 无原文禁止收藏,无译文禁止收藏 +// mBinding.collectTrans.setOnClickListener(new View.OnClickListener() { +// private DbTranslation dbTranslation; +// +// @Override +// public void onClick(View v) { +// if (translating) return; +// +// final String sourceTxt = sourceText; +// if (TextUtils.isEmpty(sourceTxt)) return; +// +// if (collectCurrent) { +// collectCurrent = false; +// getDbTranslation(getActivity()).collectJust(false); +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collecttrans); +// } else { +// collectCurrent = true; +// getDbTranslation(getActivity()).collectJust(true); +// 
mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collectedtrans); +// } +// } +// +// private DbTranslation getDbTranslation(Context context) { +// if (null == dbTranslation) { +// dbTranslation = new DbTranslation(context); +// } +// return dbTranslation; +// } +// }); +// +// mBinding.changeLanguage.setOnClickListener(new View.OnClickListener() { +// @Override +// public void onClick(View v) { +// launchLanguageSet(); +// } +// }); +// +// return mBinding.getRoot(); +// } +// +// @Override +// public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { +// super.onViewCreated(view, savedInstanceState); +// +// } +// +// @Override +// public void onResume() { +// super.onResume(); +// +// mBinding.languageSource.setText(TranslateWordApp.getSourceLanguage()); +// mBinding.languageTarget.setText(TranslateWordApp.getTargetLanguage()); +// mBinding.sourceLanguage2.setText(TranslateWordApp.getSourceLanguage()); +// mBinding.targetLanguage2.setText(TranslateWordApp.getTargetLanguage()); +// +// // TODO: 可以判断是否需要再次请求原生广告 +// } +// +// @Override +// public void onStop() { +// super.onStop(); +// if (null != tts) tts.stop(); +// } +// +// @Override +// public void onDestroy() { +// super.onDestroy(); +// if (null != tts) tts.shutdown(); +// if (null != launcher) { +// launcher.unregister(); +// launcher = null; +// } +// } +// +// private void translate(@NonNull final String text) { +// // step1. 
叫用户检查网络连接 +// if (translating) { +// // 第一次点击翻译按钮后 可能会延迟响应结果,翻译期间再次点击翻译按钮无效 +// Logger.d("log", "translating(not post data)..."); +// return; +// } +// Logger.d("log", "translating..."); +// reset(); +// +// translating = true; +// final HashMap param = new HashMap<>(); +// param.put("sourceLanguage", TranslateWordApp.getSourceLanguageCode()); +// param.put("translationLanguage", TranslateWordApp.getTargetLanguageCode()); +// param.put("text", text); +// +// sourceText = text; +// mBinding.result.setText("translating..."); +// Translator translator = new GoogleTranslator(); +// translator.translate(param, new GoogleTranslator.GoogleTranslateCallback() { +// @Override +// public void onResponse(String val) { +// translating = false; +// +// if (!TextUtils.isEmpty(val)) { +// TranslateMainActivity activity = null; +// if (getActivity() instanceof TranslateMainActivity) { +// activity = (TranslateMainActivity) getActivity(); +// } +// if (null != activity) { +// activity.runOnUiThread(new Runnable() { +// @Override +// public void run() { +// mBinding.result.setText(val); +// addLog(val); +// } +// }); +// } +// } +// } +// +// private void addLog(String targetTxt) { +// FragmentActivity activity = getActivity(); +// if (null != activity) { +// DbTranslation dbTranslation = new DbTranslation(activity); +// Translations translations = new Translations(TranslateWordApp.getSourceLanguage(), sourceText, TranslateWordApp.getTargetLanguage(), targetTxt); +// dbTranslation.addTranslation(translations); +// } +// } +// }); +// } +// +// private void reset() { +// // 归位收藏图片 +// mBinding.collectTrans.setImageResource(R.mipmap.trw_ic_collecttrans); +// // 归位收藏备份文本 +// sourceText = ""; +// // 设置当前未处于收藏状态 +// collectCurrent = false; +// // 当前不处于翻译状态 +// translating = false; +// } +// +// private void launchLanguageSet() { +// Intent intent = new Intent(getActivity(), TranslateChangeLanguageActivity.class); +// getActivity().startActivity(intent); +// } +// + +} \ No newline at end 
of file diff --git a/app/src/main/java/com/assimilate/alltrans/http/GoogleTranslator.java b/app/src/main/java/com/assimilate/alltrans/http/GoogleTranslator.java new file mode 100644 index 0000000..9e3f528 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/http/GoogleTranslator.java @@ -0,0 +1,118 @@ +package com.assimilate.alltrans.http; + +import android.text.TextUtils; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; + +import java.util.HashMap; + +import retrofit2.Call; +import retrofit2.Callback; +import retrofit2.Response; +import retrofit2.Retrofit; +import retrofit2.http.GET; +import retrofit2.http.Query; +import retrofit2.http.Url; + +public class GoogleTranslator extends Translator{ + private final static String URL = "https://translate.googleapis.com/translate_a/single"; + private final static String CLIENT = "gtx"; + private final static String DT = "t"; + + /** + * { + * "inputEncode": "UTF-8", 输入编码,有默认值,可不传递 + * "outputEncode": "UTF-8", 响应编码,有默认值,可不传递 + * "sourceLanguage": "English", 原文语言,不可为空 + * "translationLanguage": "Chinese", 译文语言,不可为空 + * "text": "翻译内容" + * } + * @param params 输入参数集 + * @param googleTranslateCallback 响应集合 + */ + @Override + public void translate(HashMap params, GoogleTranslateCallback googleTranslateCallback) { + if (null == googleTranslateCallback) { + return; + } + if (null == params || params.isEmpty()) { + googleTranslateCallback.onResponse(null); + return; + } + + String text = params.get("text"); + if (TextUtils.isEmpty(text) || text.trim().isEmpty()) { + googleTranslateCallback.onResponse(null); + return; + } + else { + text = text.trim(); + } + String sl = params.get("sourceLanguage"); + if (TextUtils.isEmpty(sl)) { + googleTranslateCallback.onResponse(null); + return; + } + String tl = params.get("translationLanguage"); + if (TextUtils.isEmpty(sl)) { + googleTranslateCallback.onResponse(null); + return; + } + + String ie = params.get("inputEncode"); + ie = TextUtils.isEmpty(ie) ? 
"UTF-8" : ie; + String oe = params.get("outputEncode"); + oe = TextUtils.isEmpty(oe) ? "UTF-8" : oe; + + RetrofitClient retrofitClient = RetrofitClient.getInstance(); + Retrofit retrofit = retrofitClient.getRetrofitClient(); + GoogleTranslateApi googleTranslateApi = retrofit.create(GoogleTranslateApi.class); + googleTranslateApi.api( + URL, CLIENT, DT, ie, oe, sl, tl, text + ).enqueue(new Callback() { + @Override + public void onResponse(Call call, Response response) { + JsonArray jsonArray = response.body(); + if (null != jsonArray && !jsonArray.isEmpty()) { + try { + StringBuilder builder = new StringBuilder(); + JsonArray array = jsonArray.get(0).getAsJsonArray(); + if (null != array && !array.isEmpty()) { + for (JsonElement jsonElement : array) { + builder.append(jsonElement.getAsJsonArray().get(0).getAsString()); + } + googleTranslateCallback.onResponse(builder.toString()); + } + } catch (Exception e) { + googleTranslateCallback.onResponse(null); + } + } else { + googleTranslateCallback.onResponse(null); + } + } + + @Override + public void onFailure(Call call, Throwable t) { + googleTranslateCallback.onResponse(null); + } + }); + } + + public interface GoogleTranslateApi { + @GET + Call api( + @Url String url, // google 固定值 + @Query("client") String client, // google 固定值 + @Query("dt") String dt, // google 固定值 + @Query("ie") String ie, + @Query("oe") String oe, + @Query("sl") String sl, + @Query("tl") String tl, + @Query("text") String text); + } + + public interface GoogleTranslateCallback { + void onResponse(String val); + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/http/RetrofitClient.java b/app/src/main/java/com/assimilate/alltrans/http/RetrofitClient.java new file mode 100644 index 0000000..e4e12b6 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/http/RetrofitClient.java @@ -0,0 +1,59 @@ +package com.assimilate.alltrans.http; + +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +import 
okhttp3.OkHttpClient; +import retrofit2.Retrofit; +import retrofit2.converter.gson.GsonConverterFactory; + +public class RetrofitClient { + private static boolean instanced = false; + private volatile static RetrofitClient retrofitClient; + + private Retrofit retrofit; + + private RetrofitClient() { + synchronized (RetrofitClient.class) { + if (instanced) { + throw new RuntimeException("Instance multiple RetrofitClient."); + } else { + instanced = true; + } + if (retrofitClient != null) { + throw new RuntimeException("Instance multiple RetrofitClient."); + } + initRetrofitClient(); + } + } + + public static RetrofitClient getInstance() { + if (retrofitClient == null) { + synchronized (RetrofitClient.class) { + if(retrofitClient == null) { + retrofitClient = new RetrofitClient(); + } + } + } + return retrofitClient; + } + + private void initRetrofitClient() { + final OkHttpClient okHttpClient = new OkHttpClient.Builder() + .connectTimeout(10, TimeUnit.SECONDS) + .readTimeout(30, TimeUnit.SECONDS) + .writeTimeout(30, TimeUnit.SECONDS) + .build(); + + final Retrofit.Builder builder = new Retrofit.Builder(); + builder.client(okHttpClient); + builder.baseUrl("https://translate.google.com/") + .addConverterFactory(GsonConverterFactory.create()); + builder.callbackExecutor(Executors.newSingleThreadExecutor()); + this.retrofit = builder.build(); + } + + public Retrofit getRetrofitClient() { + return retrofit; + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/http/Translator.java b/app/src/main/java/com/assimilate/alltrans/http/Translator.java new file mode 100644 index 0000000..1c87bf7 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/http/Translator.java @@ -0,0 +1,7 @@ +package com.assimilate.alltrans.http; + +import java.util.HashMap; + +public abstract class Translator { + public abstract void translate(final HashMap params, Callback callback); +} diff --git a/app/src/main/java/com/assimilate/alltrans/mydb/DbTranslation.java 
b/app/src/main/java/com/assimilate/alltrans/mydb/DbTranslation.java new file mode 100644 index 0000000..a7a20d0 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/mydb/DbTranslation.java @@ -0,0 +1,194 @@ +package com.assimilate.alltrans.mydb; + +import android.content.ContentValues; +import android.content.Context; +import android.database.Cursor; +import android.database.sqlite.SQLiteDatabase; +import android.database.sqlite.SQLiteOpenHelper; +import android.text.TextUtils; +import android.util.Log; + +import androidx.annotation.NonNull; + + +import com.assimilate.alltrans.common.Logger; + +import java.util.ArrayList; +import java.util.Collections; + +public class DbTranslation extends SQLiteOpenHelper { + private final static String TABLE = "trans"; + + public DbTranslation(@NonNull final Context mContext) { + super(mContext, "trw_trans_log.db", null, 1); + } + + @Override + public void onCreate(SQLiteDatabase db) { + db.execSQL("CREATE TABLE IF NOT EXISTS " + TABLE + " (" + + "id INTEGER PRIMARY KEY AUTOINCREMENT, " + + "source_lan TEXT, " + + "source_txt TEXT, " + + "target_lan TEXT, " + + "target_txt TEXT," + + "current_time_millis INTEGER," + + "exist int DEFAULT 1, " + + "collection int DEFAULT 0);" + ); + } + + @Override + public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { + db.execSQL("DROP TABLE IF EXISTS " + TABLE); + onCreate(db); + } + + /** + * 新增一条翻译记录 + * @param translations 翻译记录 + */ + public boolean addTranslation(Translations translations) { + if (null == translations) return false; + + final String sourceLanguage = translations.getSourceLanguage(); + if (TextUtils.isEmpty(sourceLanguage)) return false; + + final String sourceText = translations.getSourceTxt().trim(); + if (TextUtils.isEmpty(sourceText)) return false; + + final String targetLanguage = translations.getTargetLanguage(); + if (TextUtils.isEmpty(targetLanguage)) return false; + + final String targetTxt = translations.getTargetTxt().trim(); + if 
(TextUtils.isEmpty(targetTxt)) return false; + + try (SQLiteDatabase sqLiteDatabase = getWritableDatabase()) { + ContentValues contentValues = new ContentValues(); + contentValues.put("source_lan", sourceLanguage); + contentValues.put("source_txt", sourceText); + contentValues.put("target_lan", targetLanguage); + contentValues.put("target_txt", targetTxt); + contentValues.put("current_time_millis", System.currentTimeMillis()); + + long insert = sqLiteDatabase.insert(TABLE, null, contentValues); + Logger.d("insert", "response: " + insert); + + return -1 != insert; + } catch (Exception exception) { + Log.d("SQLite: ", "at addTranslation: " + exception.getMessage()); + return false; + } + } + + /** + * 移除翻译记录 + * @param ids {id...} + */ + public void removeTranslations(ArrayList ids) { + if (null == ids || ids.isEmpty()) return; + try (SQLiteDatabase database = getWritableDatabase()) { + ContentValues values = new ContentValues(); + values.put("exist", 0); + for (Long id : ids) { + database.update(TABLE, values, "id = ?", new String[]{String.valueOf(id)}); + } + } catch (Exception exception) { + Log.d("SQLite: ", "at removeTranslations: " + exception.getMessage()); + } + } + + /** + * 移除收藏记录 + * @param ids {id...} + */ + public void removeCollectTranslations(ArrayList ids) { + if (null == ids || ids.isEmpty()) return; + try (SQLiteDatabase database = getWritableDatabase()) { + ContentValues values = new ContentValues(); + values.put("collection", 0); + for (Long id : ids) { + database.update(TABLE, values, "id = ?", new String[]{String.valueOf(id)}); + } + } catch (Exception exception) { + Log.d("SQLite: ", "at removeTranslations: " + exception.getMessage()); + } + } + + /** + * 是否收藏刚刚那条翻译 + * @param collect 收藏|不收藏 + */ + public boolean collectJust(boolean collect) { + final int value = collect ? 
1 : 0; + try (SQLiteDatabase database = getWritableDatabase();) { + ContentValues values = new ContentValues(); + values.put("collection", value); + String selection = "id = (SELECT MAX(id) FROM " + TABLE +")"; + int result = database.update(TABLE, values, selection, null); + return result > 0; + } catch (Exception exception) { + Log.d("SQLite: ", "at collectionJust: " + exception.getMessage()); + return false; + } + } + + /** + * 得到翻译记录 + * @param filterUnCollect 是否需要过滤掉没有收藏的记录 + */ + public ArrayList getTranslations(boolean filterUnCollect) { + final ArrayList logs = new ArrayList<>(); + Cursor cursor = null; + try (SQLiteDatabase database = getWritableDatabase();) { + final String sql = "SELECT id, source_lan, source_txt, target_lan, target_txt, " + + "current_time_millis, exist, collection FROM " + TABLE; + String selection = "exist = ?"; + String[] selectionArgs = {"1"}; + cursor = database.rawQuery(sql + " WHERE " + selection, selectionArgs); + + if (cursor != null) { + final int idIndex = cursor.getColumnIndex("id"); + final int sourceLanIndex = cursor.getColumnIndex("source_lan"); + final int sourceTxtIndex = cursor.getColumnIndex("source_txt"); + final int targetLanIndex = cursor.getColumnIndex("target_lan"); + final int targetTxtIndex = cursor.getColumnIndex("target_txt"); + final int currentTimeMillisIndex = cursor.getColumnIndex("current_time_millis"); + final int existIndex = cursor.getColumnIndex("exist"); + final int collectionIndex = cursor.getColumnIndex("collection"); + + if (cursor.moveToFirst()) { + do { + if (idIndex != -1) { + long id = cursor.getInt(idIndex); + String sourceLanguage = cursor.getString(sourceLanIndex); + String sourceText = cursor.getString(sourceTxtIndex); + String targetLanguage = cursor.getString(targetLanIndex); + String targetText = cursor.getString(targetTxtIndex); + long currentTimeMillis = cursor.getLong(currentTimeMillisIndex); + int exist = cursor.getInt(existIndex); + int collection = cursor.getInt(collectionIndex); 
+ + if (exist == 1) { + // 是否需要过滤掉没有收藏的简便写法 + if (!filterUnCollect || collection == 1) { + Translations translations = new Translations(id, sourceLanguage, sourceText, targetLanguage, targetText, currentTimeMillis, exist, collection); + logs.add(translations); + } + } + + } + } while (cursor.moveToNext()); + } + + } + } catch (Exception exception) { + Log.d("SQLite: ", "at getHistory: " + exception.getMessage()); + } finally { + if (null != cursor) { + cursor.close(); + } + } + Collections.reverse(logs); + return logs; + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/mydb/Translations.java b/app/src/main/java/com/assimilate/alltrans/mydb/Translations.java new file mode 100644 index 0000000..e30a0a0 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/mydb/Translations.java @@ -0,0 +1,94 @@ +package com.assimilate.alltrans.mydb; + +public class Translations { + private long id; + private String sourceLanguage; + private String sourceTxt; + private String targetLanguage; + private String targetTxt; + private long currentTimeMillis; + private int exist; + private int collection; + + public Translations(long id, String sourceLanguage, String sourceTxt, String targetLanguage, String targetTxt, long currentTimeMillis, int exist, int collection) { + this.id = id; + this.sourceLanguage = sourceLanguage; + this.sourceTxt = sourceTxt; + this.targetLanguage = targetLanguage; + this.targetTxt = targetTxt; + this.currentTimeMillis = currentTimeMillis; + this.exist = exist; + this.collection = collection; + } + + public Translations(String sourceLanguage, String sourceTxt, String targetLanguage, String targetTxt) { + this.sourceLanguage = sourceLanguage; + this.sourceTxt = sourceTxt; + this.targetLanguage = targetLanguage; + this.targetTxt = targetTxt; + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getSourceLanguage() { + return sourceLanguage; + } + + public void 
setSourceLanguage(String sourceLanguage) { + this.sourceLanguage = sourceLanguage; + } + + public String getSourceTxt() { + return sourceTxt; + } + + public void setSourceTxt(String sourceTxt) { + this.sourceTxt = sourceTxt; + } + + public String getTargetLanguage() { + return targetLanguage; + } + + public void setTargetLanguage(String targetLanguage) { + this.targetLanguage = targetLanguage; + } + + public String getTargetTxt() { + return targetTxt; + } + + public void setTargetTxt(String targetTxt) { + this.targetTxt = targetTxt; + } + + public long getCurrentTimeMillis() { + return currentTimeMillis; + } + + public void setCurrentTimeMillis(long currentTimeMillis) { + this.currentTimeMillis = currentTimeMillis; + } + + public int getExist() { + return exist; + } + + public void setExist(int exist) { + this.exist = exist; + } + + public int getCollection() { + return collection; + } + + public void setCollection(int collection) { + this.collection = collection; + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/viewui/HistoryActivity.java b/app/src/main/java/com/assimilate/alltrans/viewui/HistoryActivity.java new file mode 100644 index 0000000..06be3ed --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/viewui/HistoryActivity.java @@ -0,0 +1,168 @@ +package com.assimilate.alltrans.viewui; + +import android.content.Context; +import android.os.Bundle; +import android.speech.tts.TextToSpeech; +import android.view.View; + +import androidx.appcompat.app.AppCompatActivity; +import androidx.recyclerview.widget.LinearLayoutManager; + +import com.assimilate.alltrans.R; +import com.assimilate.alltrans.adapters.TranslationAdapter; +import com.assimilate.alltrans.common.Widget; +import com.assimilate.alltrans.databinding.ActivityHistoryBinding; +import com.assimilate.alltrans.mydb.DbTranslation; +import com.assimilate.alltrans.mydb.Translations; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import 
java.util.HashSet; +import java.util.Locale; + +public class HistoryActivity extends AppCompatActivity { + public final static String COMMAND = "remove"; + public final static String COMMAND_COLLECTION = "remove-collection"; + public final static String COMMAND_HISTORY = "remove-history"; + private TextToSpeech tts; + private ActivityHistoryBinding mBinding; + private HashSet ids; // 通过id 删除数据库文件 + private HashSet items; // 通许index 删除界面上面的数据 + private boolean operationCollection = false; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + mBinding = ActivityHistoryBinding.inflate(getLayoutInflater()); + setContentView(mBinding.getRoot()); + + ids = new HashSet<>(); + items = new HashSet<>(); + + String extra = getIntent().getStringExtra(COMMAND); + if (COMMAND_COLLECTION.equals(extra)) { + operationCollection = true; + } else { + operationCollection = false; + } + + tts = new TextToSpeech(this, new TextToSpeech.OnInitListener() { + @Override + public void onInit(int status) { + if (null != tts && TextToSpeech.SUCCESS == status) + tts.setLanguage(Locale.getDefault()); + } + }); + + ArrayList translations = new ArrayList<>(); + if (operationCollection) { + // 查出收藏的翻译记录 + mBinding.tvFuncTrans.setText("Collect"); + mBinding.ivFuncTrans.setImageResource(R.mipmap.ic_launcher); + ArrayList list = new DbTranslation(this).getTranslations(true); + if (null != list && !list.isEmpty()) { + translations.addAll(list); + } + } else { + // 查出所有的翻译记录 + mBinding.tvFuncTrans.setText("History"); + mBinding.ivFuncTrans.setImageResource(R.mipmap.ic_launcher); + ArrayList list = new DbTranslation(this).getTranslations(false); + if (null != list && !list.isEmpty()) { + translations.addAll(list); + } + } + final TranslationAdapter adapter = new TranslationAdapter(translations, new TranslationAdapter.TranslationItemCallback() { + @Override + public void updateList(TranslationAdapter.Operation operation, long id, int position) { + if 
(TranslationAdapter.Operation.ADD == operation) { + add(id, position); + } else if (TranslationAdapter.Operation.REMOVE == operation) { + remove(id, position); + } + updateBtn(); + } + + @Override + public void speech(String value) { + if (null != tts + && TextToSpeech.LANG_NOT_SUPPORTED != tts.isLanguageAvailable(Locale.getDefault())) { + tts.speak(value, 0, null, null); + } + } + + private void add(long id, int index) { + ids.add(id); + items.add(index); + } + + private void remove(long id, int index) { + ids.remove(id); + items.remove(index); + } + + private void updateBtn() { + if (ids.isEmpty()) { + mBinding.remove.setImageResource(R.mipmap.ic_launcher); + } else { + mBinding.remove.setImageResource(R.mipmap.ic_launcher); + } + } + }); + final LinearLayoutManager layoutManager = new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false); + + mBinding.remove.setOnClickListener(new View.OnClickListener() { + private DbTranslation dbTranslation; + + @Override + public void onClick(View v) { + if (ids.isEmpty()) { + Widget.makeToast(HistoryActivity.this, "Noting to remove."); + return; + } + + if (!ids.isEmpty()) { + ArrayList longArrayList = new ArrayList<>(ids); + if (operationCollection) { + getDbTranslation(HistoryActivity.this).removeCollectTranslations(longArrayList); + } else { + getDbTranslation(HistoryActivity.this).removeTranslations(longArrayList); + } + ids.clear(); + } + if (!items.isEmpty()) { + ArrayList integerArrayList = new ArrayList<>(items); + Collections.sort(integerArrayList, new Comparator() { + @Override + public int compare(Integer o1, Integer o2) { + return o2.compareTo(o1); + } + }); + adapter.updateSet(integerArrayList); + items.clear(); + } + mBinding.remove.setImageResource(R.mipmap.ic_launcher); + } + + private DbTranslation getDbTranslation(Context context) { + if (null == dbTranslation) { + dbTranslation = new DbTranslation(context); + } + return dbTranslation; + } + }); + + 
mBinding.histories.setLayoutManager(layoutManager); + mBinding.histories.setAdapter(adapter); + } + + @Override + public void onBackPressed() { + super.onBackPressed(); + } + + public void clickBack(View view) { + onBackPressed(); + } +} \ No newline at end of file diff --git a/app/src/main/java/com/assimilate/alltrans/viewui/LanguageChangeActivity.kt b/app/src/main/java/com/assimilate/alltrans/viewui/LanguageChangeActivity.kt new file mode 100644 index 0000000..ad82128 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/viewui/LanguageChangeActivity.kt @@ -0,0 +1,24 @@ +package com.assimilate.alltrans.viewui + +import android.os.Bundle +import androidx.activity.enableEdgeToEdge +import androidx.appcompat.app.AppCompatActivity +import androidx.core.view.ViewCompat +import androidx.core.view.WindowInsetsCompat +import com.assimilate.alltrans.R +import com.assimilate.alltrans.databinding.ActivityLanguageChangeBinding + +class LanguageChangeActivity : AppCompatActivity() { + private lateinit var binding: ActivityLanguageChangeBinding + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + binding = ActivityLanguageChangeBinding.inflate(layoutInflater) + enableEdgeToEdge() + setContentView(binding.root) + ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets -> + val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars()) + v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom) + insets + } + } +} \ No newline at end of file diff --git a/app/src/main/java/com/assimilate/alltrans/viewui/MainActivity.kt b/app/src/main/java/com/assimilate/alltrans/viewui/MainActivity.kt new file mode 100644 index 0000000..52dfee4 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/viewui/MainActivity.kt @@ -0,0 +1,75 @@ +package com.assimilate.alltrans.viewui + +import android.content.Intent +import android.os.Bundle +import androidx.activity.enableEdgeToEdge +import androidx.appcompat.app.AppCompatActivity +import androidx.core.view.ViewCompat +import androidx.core.view.WindowInsetsCompat +import com.assimilate.alltrans.R +import com.assimilate.alltrans.databinding.ActivityMainBinding + +class MainActivity : AppCompatActivity() { + + + private lateinit var binding: ActivityMainBinding + + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + enableEdgeToEdge() + binding = ActivityMainBinding.inflate(layoutInflater) + setContentView(binding.root) + + ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets -> + val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars()) + v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom) + insets + } + + initClick() + + } + + + private fun initClick() { + binding.tvMainPhotoTrans.setOnClickListener { + startActivity( + Intent(this, StillImageActivity::class.java) + ) + } + binding.tvMainTrans.setOnClickListener { + startActivity(Intent(this, TextResultActivity::class.java)) + } + binding.ivMainSetting.setOnClickListener { + startActivity( + Intent(this, SettingsActivity::class.java) + ) + } + binding.sourceLanguage2.setOnClickListener { + startActivity( + Intent(this, LanguageChangeActivity::class.java) + ) + } + binding.targetLanguage2.setOnClickListener { + startActivity( + Intent(this, LanguageChangeActivity::class.java) + ) + } + binding.ivMainHistory.setOnClickListener { + startActivity( + Intent(this, HistoryActivity::class.java) + ) + } + binding.llQuickSet.setOnClickListener { + startActivity( + Intent(this, QuickSetActivity::class.java) + ) + } + binding.ivQuickStart.setOnClickListener { + startActivity( + Intent(this, QuickSetActivity::class.java) + ) + } + } +} \ No newline at end of file diff --git a/app/src/main/java/com/assimilate/alltrans/MainActivity.kt b/app/src/main/java/com/assimilate/alltrans/viewui/QuickSetActivity.kt similarity index 64% rename from app/src/main/java/com/assimilate/alltrans/MainActivity.kt rename to app/src/main/java/com/assimilate/alltrans/viewui/QuickSetActivity.kt index 247818f..0a3f6ee 100644 --- a/app/src/main/java/com/assimilate/alltrans/MainActivity.kt +++ b/app/src/main/java/com/assimilate/alltrans/viewui/QuickSetActivity.kt @@ -1,16 +1,22 @@ -package com.assimilate.alltrans +package com.assimilate.alltrans.viewui 
import android.os.Bundle import androidx.activity.enableEdgeToEdge import androidx.appcompat.app.AppCompatActivity import androidx.core.view.ViewCompat import androidx.core.view.WindowInsetsCompat +import com.assimilate.alltrans.R +import com.assimilate.alltrans.databinding.ActivityQuickSetBinding + +class QuickSetActivity : AppCompatActivity() { + + private lateinit var binding: ActivityQuickSetBinding -class MainActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) + binding = ActivityQuickSetBinding.inflate(layoutInflater) enableEdgeToEdge() - setContentView(R.layout.activity_main) + setContentView(binding.root) ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets -> val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars()) v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom) diff --git a/app/src/main/java/com/assimilate/alltrans/viewui/SettingsActivity.kt b/app/src/main/java/com/assimilate/alltrans/viewui/SettingsActivity.kt new file mode 100644 index 0000000..161ae97 --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/viewui/SettingsActivity.kt @@ -0,0 +1,48 @@ +package com.assimilate.alltrans.viewui + +import android.os.Bundle +import androidx.activity.enableEdgeToEdge +import androidx.appcompat.app.AppCompatActivity +import androidx.core.view.ViewCompat +import androidx.core.view.WindowInsetsCompat +import com.assimilate.alltrans.R +import com.assimilate.alltrans.databinding.ActivitySettingsBinding +import com.google.android.material.bottomsheet.BottomSheetDialog + + +class SettingsActivity + : AppCompatActivity() { + private lateinit var binding: ActivitySettingsBinding + private lateinit var bottomSheetDialog: BottomSheetDialog + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + enableEdgeToEdge() + binding = ActivitySettingsBinding.inflate(layoutInflater) + setContentView(binding.root) + ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets -> + val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars()) + v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom) + insets + } + + + initView() + initClick() + } + + private fun initView() { + // 设置 BottomSheetDialog + bottomSheetDialog = BottomSheetDialog(this) + bottomSheetDialog.setContentView(R.layout.bottomsheet_rate) + bottomSheetDialog.dismissWithAnimation = true + + } + + private fun initClick() { + binding.llRate.setOnClickListener { + bottomSheetDialog.show() + + } + } +} \ No newline at end of file diff --git a/app/src/main/java/com/assimilate/alltrans/viewui/StillImageActivity.kt b/app/src/main/java/com/assimilate/alltrans/viewui/StillImageActivity.kt new file mode 100644 index 0000000..a43927f --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/viewui/StillImageActivity.kt @@ -0,0 +1,387 @@ +package com.assimilate.alltrans.viewui + + +import android.app.Activity +import android.content.ContentValues +import android.content.Intent +import android.content.res.Configuration +import android.graphics.Bitmap +import android.net.Uri +import android.os.Bundle +import android.provider.MediaStore +import android.util.Log +import android.util.Pair +import android.view.MenuItem +import android.view.View +import android.view.ViewTreeObserver +import android.widget.AdapterView +import android.widget.AdapterView.OnItemSelectedListener +import android.widget.ArrayAdapter +import android.widget.ImageView +import android.widget.PopupMenu +import android.widget.Spinner +import android.widget.Toast +import androidx.appcompat.app.AppCompatActivity +import com.assimilate.alltrans.R +import com.assimilate.alltrans.common.BitmapUtils +import 
com.assimilate.alltrans.common.TextRecognitionProcessor +import com.assimilate.alltrans.common.VisionImageProcessor +import com.assimilate.alltrans.curview.GraphicOverlay +import com.google.android.gms.common.annotation.KeepName + +import com.google.mlkit.vision.text.chinese.ChineseTextRecognizerOptions +import com.google.mlkit.vision.text.devanagari.DevanagariTextRecognizerOptions +import com.google.mlkit.vision.text.japanese.JapaneseTextRecognizerOptions +import com.google.mlkit.vision.text.korean.KoreanTextRecognizerOptions +import com.google.mlkit.vision.text.latin.TextRecognizerOptions +import java.io.IOException + +/** Activity demonstrating different image detector features with a still image from camera. */ +@KeepName +class StillImageActivity : AppCompatActivity() { + private var preview: ImageView? = null + private var graphicOverlay: GraphicOverlay? = null + private var selectedMode = TEXT_RECOGNITION_CHINESE + private var selectedSize: String? = SIZE_SCREEN + private var isLandScape = false + private var imageUri: Uri? = null + + // Max width (portrait mode) + private var imageMaxWidth = 0 + + // Max height (portrait mode) + private var imageMaxHeight = 0 + private var imageProcessor: VisionImageProcessor? = null + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + setContentView(R.layout.activity_still_image) + findViewById(R.id.select_image_button).setOnClickListener { view: View -> + // Menu for selecting either: a) take new photo b) select from existing + val popup = PopupMenu(this@StillImageActivity, view) + popup.setOnMenuItemClickListener { menuItem: MenuItem -> + val itemId = menuItem.itemId + if (itemId == R.id.select_images_from_local) { + startChooseImageIntentForResult() + return@setOnMenuItemClickListener true + } else if (itemId == R.id.take_photo_using_camera) { + startCameraIntentForResult() + return@setOnMenuItemClickListener true + } + false + } + val inflater = popup.menuInflater + inflater.inflate(R.menu.camera_button_menu, popup.menu) + popup.show() + } + preview = findViewById(R.id.preview) + graphicOverlay = findViewById(R.id.graphic_overlay) + + populateFeatureSelector() + populateSizeSelector() + isLandScape = resources.configuration.orientation == Configuration.ORIENTATION_LANDSCAPE + if (savedInstanceState != null) { + imageUri = savedInstanceState.getParcelable(KEY_IMAGE_URI) + imageMaxWidth = savedInstanceState.getInt(KEY_IMAGE_MAX_WIDTH) + imageMaxHeight = savedInstanceState.getInt(KEY_IMAGE_MAX_HEIGHT) + selectedSize = savedInstanceState.getString(KEY_SELECTED_SIZE) + } + + val rootView = findViewById(R.id.root) + rootView.viewTreeObserver.addOnGlobalLayoutListener( + object : ViewTreeObserver.OnGlobalLayoutListener { + override fun onGlobalLayout() { + rootView.viewTreeObserver.removeOnGlobalLayoutListener(this) + imageMaxWidth = rootView.width + imageMaxHeight = rootView.height - findViewById(R.id.control).height + if (SIZE_SCREEN == selectedSize) { + tryReloadAndDetectInImage() + } + } + } + ) + + val settingsButton = findViewById(R.id.settings_button) + settingsButton.setOnClickListener { +// val intent = Intent(applicationContext, SettingsActivity::class.java) +// intent.putExtra(SettingsActivity.EXTRA_LAUNCH_SOURCE, LaunchSource.STILL_IMAGE) +// 
startActivity(intent) + } + } + + public override fun onResume() { + super.onResume() + Log.d(TAG, "onResume") + createImageProcessor() + tryReloadAndDetectInImage() + } + + public override fun onPause() { + super.onPause() + imageProcessor?.run { this.stop() } + } + + public override fun onDestroy() { + super.onDestroy() + imageProcessor?.run { this.stop() } + } + + private fun populateFeatureSelector() { + val featureSpinner = findViewById(R.id.feature_selector) + val options: MutableList = ArrayList() + options.add(TEXT_RECOGNITION_CHINESE); // 识别中文文本 + options.add(TEXT_RECOGNITION_LATIN); // 识别拉丁文本 + options.add(TEXT_RECOGNITION_DEVANAGARI); // 识别梵文文本 + options.add(TEXT_RECOGNITION_JAPANESE); // 识别日文文本 + options.add(TEXT_RECOGNITION_KOREAN); // 识别韩文文本 + + + // Creating adapter for featureSpinner + val dataAdapter = ArrayAdapter(this, R.layout.spinner_style, options) + // Drop down layout style - list view with radio button + dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item) + // attaching data adapter to spinner + featureSpinner.adapter = dataAdapter + featureSpinner.onItemSelectedListener = + object : OnItemSelectedListener { + override fun onItemSelected( + parentView: AdapterView<*>, + selectedItemView: View?, + pos: Int, + id: Long + ) { + if (pos >= 0) { + selectedMode = parentView.getItemAtPosition(pos).toString() + createImageProcessor() + tryReloadAndDetectInImage() + } + } + + override fun onNothingSelected(arg0: AdapterView<*>?) 
{} + } + } + + private fun populateSizeSelector() { + val sizeSpinner = findViewById(R.id.size_selector) + val options: MutableList = ArrayList() + options.add(SIZE_SCREEN) + options.add(SIZE_1024_768) + options.add(SIZE_640_480) + options.add(SIZE_ORIGINAL) + // Creating adapter for featureSpinner + val dataAdapter = ArrayAdapter(this, R.layout.spinner_style, options) + // Drop down layout style - list view with radio button + dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item) + // attaching data adapter to spinner + sizeSpinner.adapter = dataAdapter + sizeSpinner.onItemSelectedListener = + object : OnItemSelectedListener { + override fun onItemSelected( + parentView: AdapterView<*>, + selectedItemView: View?, + pos: Int, + id: Long + ) { + if (pos >= 0) { + selectedSize = parentView.getItemAtPosition(pos).toString() + tryReloadAndDetectInImage() + } + } + + override fun onNothingSelected(arg0: AdapterView<*>?) {} + } + } + + public override fun onSaveInstanceState(outState: Bundle) { + super.onSaveInstanceState(outState) + outState.putParcelable(KEY_IMAGE_URI, imageUri) + outState.putInt(KEY_IMAGE_MAX_WIDTH, imageMaxWidth) + outState.putInt(KEY_IMAGE_MAX_HEIGHT, imageMaxHeight) + outState.putString(KEY_SELECTED_SIZE, selectedSize) + } + + private fun startCameraIntentForResult() { // Clean up last time's image + imageUri = null + preview!!.setImageBitmap(null) + val takePictureIntent = Intent(MediaStore.ACTION_IMAGE_CAPTURE) + if (takePictureIntent.resolveActivity(packageManager) != null) { + val values = ContentValues() + values.put(MediaStore.Images.Media.TITLE, "New Picture") + values.put(MediaStore.Images.Media.DESCRIPTION, "From Camera") + imageUri = contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values) + takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, imageUri) + startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE) + } + } + + private fun startChooseImageIntentForResult() { + val intent 
= Intent() + intent.type = "image/*" + intent.action = Intent.ACTION_GET_CONTENT + startActivityForResult(Intent.createChooser(intent, "Select Picture"), REQUEST_CHOOSE_IMAGE) + } + + override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) { + if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == Activity.RESULT_OK) { + tryReloadAndDetectInImage() + } else if (requestCode == REQUEST_CHOOSE_IMAGE && resultCode == Activity.RESULT_OK) { + // In this case, imageUri is returned by the chooser, save it. + imageUri = data!!.data + tryReloadAndDetectInImage() + } else { + super.onActivityResult(requestCode, resultCode, data) + } + } + + private fun tryReloadAndDetectInImage() { + Log.d(TAG, "Try reload and detect image") + try { + if (imageUri == null) { + return + } + + if (SIZE_SCREEN == selectedSize && imageMaxWidth == 0) { + // UI layout has not finished yet, will reload once it's ready. + return + } + + val imageBitmap = + BitmapUtils.getBitmapFromContentUri(contentResolver, imageUri) ?: return + // Clear the overlay first + graphicOverlay!!.clear() + + val resizedBitmap: Bitmap + resizedBitmap = + if (selectedSize == SIZE_ORIGINAL) { + imageBitmap + } else { + // Get the dimensions of the image view + val targetedSize: Pair = targetedWidthHeight + + // Determine how much to scale down the image + val scaleFactor = + Math.max( + imageBitmap.width.toFloat() / targetedSize.first.toFloat(), + imageBitmap.height.toFloat() / targetedSize.second.toFloat() + ) + Bitmap.createScaledBitmap( + imageBitmap, + (imageBitmap.width / scaleFactor).toInt(), + (imageBitmap.height / scaleFactor).toInt(), + true + ) + } + + preview!!.setImageBitmap(resizedBitmap) + if (imageProcessor != null) { + graphicOverlay!!.setImageSourceInfo( + resizedBitmap.width, + resizedBitmap.height, + /* isFlipped= */ false + ) + imageProcessor!!.processBitmap(resizedBitmap, graphicOverlay) + } else { + Log.e( + TAG, + "Null imageProcessor, please check adb logs for imageProcessor 
creation error" + ) + } + } catch (e: IOException) { + Log.e(TAG, "Error retrieving saved image") + imageUri = null + } + } + + private val targetedWidthHeight: Pair + get() { + val targetWidth: Int + val targetHeight: Int + when (selectedSize) { + SIZE_SCREEN -> { + targetWidth = imageMaxWidth + targetHeight = imageMaxHeight + } + + SIZE_640_480 -> { + targetWidth = if (isLandScape) 640 else 480 + targetHeight = if (isLandScape) 480 else 640 + } + + SIZE_1024_768 -> { + targetWidth = if (isLandScape) 1024 else 768 + targetHeight = if (isLandScape) 768 else 1024 + } + + else -> throw IllegalStateException("Unknown size") + } + return Pair(targetWidth, targetHeight) + } + + private fun createImageProcessor() { + try { + when (selectedMode) { + TEXT_RECOGNITION_LATIN -> + imageProcessor = + TextRecognitionProcessor(this, TextRecognizerOptions.Builder().build()) + + TEXT_RECOGNITION_CHINESE -> + imageProcessor = + TextRecognitionProcessor( + this, + ChineseTextRecognizerOptions.Builder().build() + ) + + TEXT_RECOGNITION_DEVANAGARI -> + imageProcessor = + TextRecognitionProcessor( + this, + DevanagariTextRecognizerOptions.Builder().build() + ) + + TEXT_RECOGNITION_JAPANESE -> + imageProcessor = + TextRecognitionProcessor( + this, + JapaneseTextRecognizerOptions.Builder().build() + ) + + TEXT_RECOGNITION_KOREAN -> + imageProcessor = + TextRecognitionProcessor( + this, + KoreanTextRecognizerOptions.Builder().build() + ) + + else -> Log.e(TAG, "Unknown selectedMode: $selectedMode") + } + } catch (e: Exception) { + Log.e(TAG, "Can not create image processor: $selectedMode", e) + Toast.makeText( + applicationContext, + "Can not create image processor: " + e.message, + Toast.LENGTH_LONG + ) + .show() + } + } + + companion object { + private const val TAG = "StillImageActivity" + private const val TEXT_RECOGNITION_LATIN = "Text Recognition Latin" + private const val TEXT_RECOGNITION_CHINESE = "Text Recognition Chinese" + private const val TEXT_RECOGNITION_DEVANAGARI = "Text 
Recognition Devanagari" + private const val TEXT_RECOGNITION_JAPANESE = "Text Recognition Japanese" + private const val TEXT_RECOGNITION_KOREAN = "Text Recognition Korean" + + private const val SIZE_SCREEN = "w:screen" // Match screen width + private const val SIZE_1024_768 = "w:1024" // ~1024*768 in a normal ratio + private const val SIZE_640_480 = "w:640" // ~640*480 in a normal ratio + private const val SIZE_ORIGINAL = "w:original" // Original image size + private const val KEY_IMAGE_URI = "com.google.mlkit.vision.demo.KEY_IMAGE_URI" + private const val KEY_IMAGE_MAX_WIDTH = "com.google.mlkit.vision.demo.KEY_IMAGE_MAX_WIDTH" + private const val KEY_IMAGE_MAX_HEIGHT = "com.google.mlkit.vision.demo.KEY_IMAGE_MAX_HEIGHT" + private const val KEY_SELECTED_SIZE = "com.google.mlkit.vision.demo.KEY_SELECTED_SIZE" + private const val REQUEST_IMAGE_CAPTURE = 1001 + private const val REQUEST_CHOOSE_IMAGE = 1002 + } +} diff --git a/app/src/main/java/com/assimilate/alltrans/viewui/TextResultActivity.kt b/app/src/main/java/com/assimilate/alltrans/viewui/TextResultActivity.kt new file mode 100644 index 0000000..732d74a --- /dev/null +++ b/app/src/main/java/com/assimilate/alltrans/viewui/TextResultActivity.kt @@ -0,0 +1,37 @@ +package com.assimilate.alltrans.viewui + +import android.content.Intent +import android.os.Bundle +import androidx.activity.enableEdgeToEdge +import androidx.appcompat.app.AppCompatActivity +import androidx.core.view.ViewCompat +import androidx.core.view.WindowInsetsCompat +import com.assimilate.alltrans.R +import com.assimilate.alltrans.databinding.ActivityTextResultBinding + +class TextResultActivity : AppCompatActivity() { + + private lateinit var binding: ActivityTextResultBinding + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + binding = ActivityTextResultBinding.inflate(layoutInflater) + enableEdgeToEdge() + setContentView(binding.root) + ViewCompat.setOnApplyWindowInsetsListener(findViewById(R.id.main)) { v, insets -> + val systemBars = insets.getInsets(WindowInsetsCompat.Type.systemBars()) + v.setPadding(systemBars.left, systemBars.top, systemBars.right, systemBars.bottom) + insets + } + + initClick() + } + + private fun initClick() { + binding.ivReToPhoto.setOnClickListener { + startActivity( + Intent(this, StillImageActivity::class.java) + ) + } + } +} \ No newline at end of file diff --git a/app/src/main/res/drawable-xxxhdpi/ic_exchage.webp b/app/src/main/res/drawable-xxxhdpi/ic_exchage.webp new file mode 100644 index 0000000..507ee4f Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_exchage.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_favorite.webp b/app/src/main/res/drawable-xxxhdpi/ic_favorite.webp new file mode 100644 index 0000000..85ea94e Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_favorite.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_language.webp b/app/src/main/res/drawable-xxxhdpi/ic_language.webp new file mode 100644 index 0000000..33871f0 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_language.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_privacy_policy.webp b/app/src/main/res/drawable-xxxhdpi/ic_privacy_policy.webp new file mode 100644 index 0000000..2834bb1 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_privacy_policy.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_rate.webp b/app/src/main/res/drawable-xxxhdpi/ic_rate.webp new file mode 100644 index 0000000..a587d05 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_rate.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_set_share.webp b/app/src/main/res/drawable-xxxhdpi/ic_set_share.webp new file mode 100644 index 
0000000..a094528 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_set_share.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_settings_white_24dp.png b/app/src/main/res/drawable-xxxhdpi/ic_settings_white_24dp.png new file mode 100755 index 0000000..180470c Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_settings_white_24dp.png differ diff --git a/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp.xml b/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp.xml new file mode 100755 index 0000000..e3c887f --- /dev/null +++ b/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp.xml @@ -0,0 +1,9 @@ + + + diff --git a/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp_inset.png b/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp_inset.png new file mode 100644 index 0000000..e69de29 diff --git a/app/src/main/res/drawable-xxxhdpi/ic_version_update.webp b/app/src/main/res/drawable-xxxhdpi/ic_version_update.webp new file mode 100644 index 0000000..d100b94 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/ic_version_update.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_dic.webp b/app/src/main/res/drawable-xxxhdpi/main_dic.webp new file mode 100644 index 0000000..d9021ba Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_dic.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_history.webp b/app/src/main/res/drawable-xxxhdpi/main_history.webp new file mode 100644 index 0000000..1a5cd8c Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_history.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_ic_quick.webp b/app/src/main/res/drawable-xxxhdpi/main_ic_quick.webp new file mode 100644 index 0000000..eb3ed25 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_ic_quick.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_paste.webp 
b/app/src/main/res/drawable-xxxhdpi/main_paste.webp new file mode 100644 index 0000000..27e27dd Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_paste.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_photo.webp b/app/src/main/res/drawable-xxxhdpi/main_photo.webp new file mode 100644 index 0000000..429f430 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_photo.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_setting.webp b/app/src/main/res/drawable-xxxhdpi/main_setting.webp new file mode 100644 index 0000000..06e094d Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_setting.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_setting_quick.webp b/app/src/main/res/drawable-xxxhdpi/main_setting_quick.webp new file mode 100644 index 0000000..8ec449e Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_setting_quick.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/main_voice_text.webp b/app/src/main/res/drawable-xxxhdpi/main_voice_text.webp new file mode 100644 index 0000000..7ee646f Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/main_voice_text.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/set_sheet_icon.webp b/app/src/main/res/drawable-xxxhdpi/set_sheet_icon.webp new file mode 100644 index 0000000..2138d98 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/set_sheet_icon.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/sus_trans_all.webp b/app/src/main/res/drawable-xxxhdpi/sus_trans_all.webp new file mode 100644 index 0000000..cce2acc Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/sus_trans_all.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/sus_trans_copy.webp b/app/src/main/res/drawable-xxxhdpi/sus_trans_copy.webp new file mode 100644 index 0000000..9d0d175 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/sus_trans_copy.webp differ diff --git 
a/app/src/main/res/drawable-xxxhdpi/sus_trans_distr.webp b/app/src/main/res/drawable-xxxhdpi/sus_trans_distr.webp new file mode 100644 index 0000000..f147c3c Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/sus_trans_distr.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/sus_trans_home.webp b/app/src/main/res/drawable-xxxhdpi/sus_trans_home.webp new file mode 100644 index 0000000..d80492d Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/sus_trans_home.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/sus_trans_move.webp b/app/src/main/res/drawable-xxxhdpi/sus_trans_move.webp new file mode 100644 index 0000000..069832a Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/sus_trans_move.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/sus_trans_photo.webp b/app/src/main/res/drawable-xxxhdpi/sus_trans_photo.webp new file mode 100644 index 0000000..9e8ea13 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/sus_trans_photo.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/tr_photo.webp b/app/src/main/res/drawable-xxxhdpi/tr_photo.webp new file mode 100644 index 0000000..0c0a750 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/tr_photo.webp differ diff --git a/app/src/main/res/drawable-xxxhdpi/tr_voice.webp b/app/src/main/res/drawable-xxxhdpi/tr_voice.webp new file mode 100644 index 0000000..f98cb37 Binary files /dev/null and b/app/src/main/res/drawable-xxxhdpi/tr_voice.webp differ diff --git a/app/src/main/res/drawable/button_r10_gray_bg.xml b/app/src/main/res/drawable/button_r10_gray_bg.xml new file mode 100644 index 0000000..d228ebe --- /dev/null +++ b/app/src/main/res/drawable/button_r10_gray_bg.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/button_r20_black_bg.xml b/app/src/main/res/drawable/button_r20_black_bg.xml new file mode 100644 index 0000000..44a9fea --- /dev/null +++ 
b/app/src/main/res/drawable/button_r20_black_bg.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/button_r20_white_bg.xml b/app/src/main/res/drawable/button_r20_white_bg.xml new file mode 100644 index 0000000..9166980 --- /dev/null +++ b/app/src/main/res/drawable/button_r20_white_bg.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/button_r24_blue_bg.xml b/app/src/main/res/drawable/button_r24_blue_bg.xml new file mode 100644 index 0000000..09c863a --- /dev/null +++ b/app/src/main/res/drawable/button_r24_blue_bg.xml @@ -0,0 +1,12 @@ + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/drawable/ic_add.xml b/app/src/main/res/drawable/ic_add.xml new file mode 100644 index 0000000..2af50bb --- /dev/null +++ b/app/src/main/res/drawable/ic_add.xml @@ -0,0 +1,21 @@ + + + + + + + diff --git a/app/src/main/res/drawable/ic_arrow_right.xml b/app/src/main/res/drawable/ic_arrow_right.xml new file mode 100644 index 0000000..275e2a3 --- /dev/null +++ b/app/src/main/res/drawable/ic_arrow_right.xml @@ -0,0 +1,16 @@ + + + + + + + diff --git a/app/src/main/res/drawable/ic_arrow_try.xml b/app/src/main/res/drawable/ic_arrow_try.xml new file mode 100644 index 0000000..13e1e59 --- /dev/null +++ b/app/src/main/res/drawable/ic_arrow_try.xml @@ -0,0 +1,22 @@ + + + + + + + + diff --git a/app/src/main/res/drawable/ic_back.xml b/app/src/main/res/drawable/ic_back.xml new file mode 100644 index 0000000..f8cda12 --- /dev/null +++ b/app/src/main/res/drawable/ic_back.xml @@ -0,0 +1,19 @@ + + + + + diff --git a/app/src/main/res/drawable/ic_close.xml b/app/src/main/res/drawable/ic_close.xml new file mode 100644 index 0000000..08e87cc --- /dev/null +++ b/app/src/main/res/drawable/ic_close.xml @@ -0,0 +1,26 @@ + + + + + + + diff --git a/app/src/main/res/drawable/ic_copy.xml b/app/src/main/res/drawable/ic_copy.xml new file mode 100644 index 0000000..b215c88 --- /dev/null +++ 
b/app/src/main/res/drawable/ic_copy.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + diff --git a/app/src/main/res/drawable/ic_dashed_line.xml b/app/src/main/res/drawable/ic_dashed_line.xml new file mode 100644 index 0000000..b5c0f9f --- /dev/null +++ b/app/src/main/res/drawable/ic_dashed_line.xml @@ -0,0 +1,14 @@ + + + + + + + + + + diff --git a/app/src/main/res/drawable/ic_dashed_line_4b4b4b4.xml b/app/src/main/res/drawable/ic_dashed_line_4b4b4b4.xml new file mode 100644 index 0000000..8df3e00 --- /dev/null +++ b/app/src/main/res/drawable/ic_dashed_line_4b4b4b4.xml @@ -0,0 +1,14 @@ + + + + + + + + + + diff --git a/app/src/main/res/drawable/ic_down_choose.xml b/app/src/main/res/drawable/ic_down_choose.xml new file mode 100644 index 0000000..fda7c11 --- /dev/null +++ b/app/src/main/res/drawable/ic_down_choose.xml @@ -0,0 +1,10 @@ + + + + diff --git a/app/src/main/res/drawable/ic_like_def.xml b/app/src/main/res/drawable/ic_like_def.xml new file mode 100644 index 0000000..a014740 --- /dev/null +++ b/app/src/main/res/drawable/ic_like_def.xml @@ -0,0 +1,17 @@ + + + + + + + + diff --git a/app/src/main/res/drawable/ic_next.xml b/app/src/main/res/drawable/ic_next.xml new file mode 100644 index 0000000..e646eab --- /dev/null +++ b/app/src/main/res/drawable/ic_next.xml @@ -0,0 +1,18 @@ + + + + + + + + + diff --git a/app/src/main/res/drawable/ic_share.xml b/app/src/main/res/drawable/ic_share.xml new file mode 100644 index 0000000..4975cbe --- /dev/null +++ b/app/src/main/res/drawable/ic_share.xml @@ -0,0 +1,38 @@ + + + + + + + + + diff --git a/app/src/main/res/drawable/ic_voice.xml b/app/src/main/res/drawable/ic_voice.xml new file mode 100644 index 0000000..74b106c --- /dev/null +++ b/app/src/main/res/drawable/ic_voice.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + diff --git a/app/src/main/res/layout/activity_history.xml b/app/src/main/res/layout/activity_history.xml new file mode 100644 index 0000000..f8d9080 --- /dev/null +++ b/app/src/main/res/layout/activity_history.xml @@ -0,0 
+1,63 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/layout/activity_language_change.xml b/app/src/main/res/layout/activity_language_change.xml new file mode 100644 index 0000000..16fe26d --- /dev/null +++ b/app/src/main/res/layout/activity_language_change.xml @@ -0,0 +1,120 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/layout/activity_main.xml b/app/src/main/res/layout/activity_main.xml index 86a5d97..bdd4f09 100644 --- a/app/src/main/res/layout/activity_main.xml +++ b/app/src/main/res/layout/activity_main.xml @@ -5,9 +5,15 @@ android:id="@+id/main" android:layout_width="match_parent" android:layout_height="match_parent" - tools:context=".MainActivity"> + android:background="@color/main_bg_fff9f9f9" + android:paddingStart="16dp" + android:paddingTop="16dp" + android:paddingEnd="16dp" + tools:context=".viewui.MainActivity"> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/layout/activity_quick_set.xml b/app/src/main/res/layout/activity_quick_set.xml new file mode 100644 index 0000000..4185f2b --- /dev/null +++ b/app/src/main/res/layout/activity_quick_set.xml @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/app/src/main/res/layout/activity_settings.xml b/app/src/main/res/layout/activity_settings.xml new file mode 100644 index 0000000..6ed939e --- /dev/null +++ b/app/src/main/res/layout/activity_settings.xml @@ -0,0 +1,238 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/app/src/main/res/layout/activity_still_image.xml b/app/src/main/res/layout/activity_still_image.xml new file mode 100755 index 0000000..12b196b --- /dev/null +++ 
b/app/src/main/res/layout/activity_still_image.xml @@ -0,0 +1,76 @@ + + + + + + + + + +