摇曳的蔷薇
Looking at the stack trace, I can see that you are opening the socket connection on the UI thread, and that is what causes the crash. You need to move the connection logic into its own thread. Here is the documentation that can help you: https://developer.android.com/guide/components/processes-and-threads#Threads

Try this MainActivity:

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Application;
import android.content.ActivityNotFoundException;
import android.content.ContentValues;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.media.Image;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import android.provider.Settings;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.GridLayout;
import android.widget.TextView;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;

import com.chaquo.python.PyObject;
import com.chaquo.python.Python;
import com.chaquo.python.android.AndroidPlatform;
import com.google.android.gms.location.FusedLocationProviderClient;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.ml.vision.FirebaseVision;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import com.google.firebase.ml.vision.text.FirebaseVisionText;
import com.google.firebase.ml.vision.text.FirebaseVisionTextDetector;

import org.w3c.dom.Text;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.net.*;
import java.io.*;

import static android.Manifest.permission.ACCESS_FINE_LOCATION;
import static android.Manifest.permission.CAMERA;
import static android.Manifest.permission.READ_EXTERNAL_STORAGE;
import static android.Manifest.permission.RECORD_AUDIO;
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;

public class MainActivity extends AppCompatActivity {

    private static final String TAG = "MainActivity";

    private Button btnRecognize;
    private SpeechRecognizer speechRecognizer;
    static EditText ET_ShowRecognized;
    String locality;
    private Intent intent;
    private String ET_ShowRecognizedText;
    private String ProcessingText;
    //private FusedLocationProviderClient fusedLocationProviderClient;
    //Geocoder geocoder;
    Python py;
    PyObject pyobj;
    PyObject obj;
    String currentDate;
    String currentTime;
    static TextToSpeech tts;
    Uri imageURI;
    ContentValues contentValues;
    Intent cameraIntent;
    static final int REQUEST_IMAGE_CAPTURE = 1;
    Image mediaImage;
    FirebaseVisionImage firebaseVisionImage;
    static Bitmap imageBitmap;
    FirebaseVisionTextDetector textDetector;
    String imgText;
    Intent CameraIntent;

    // Socket state shared with the background tasks below.
    static Thread sent;
    static Thread receive;
    static Socket socket;
    InputStreamReader in;
    BufferedReader bf;
    String ServerOutput;
    PrintWriter writer;
    String ServerInput;

    @SuppressLint({"SetTextI18n", "ClickableViewAccessibility", "MissingPermission"})
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        ActivityCompat.requestPermissions(this,
                new String[]{RECORD_AUDIO, WRITE_EXTERNAL_STORAGE, READ_EXTERNAL_STORAGE, ACCESS_FINE_LOCATION, CAMERA},
                PackageManager.PERMISSION_GRANTED);

        ET_ShowRecognized = findViewById(R.id.ET_ShowRecognized);
        btnRecognize = findViewById(R.id.btnRecognize);

        /*fusedLocationProviderClient.getLastLocation().addOnCompleteListener(new OnCompleteListener<Location>() {
            @Override
            public void onComplete(@NonNull Task<Location> task) {
                Location location = task.getResult();
                if (location != null) {
                    geocoder = new Geocoder(MainActivity.this, Locale.getDefault());
                    try {
                        List<Address> address = geocoder.getFromLocation(location.getLatitude(), location.getLongitude(), 1);
                        locality = address.get(0).getLocality();
                    } catch (IOException e) {
                        ;
                    }
                }
            }
        });

        if (!Python.isStarted()) {
            Python.start(new AndroidPlatform(this));
        }
        py = Python.getInstance();
        pyobj = py.getModule("WolframAlpha");
        obj = pyobj.callAttr("main", locality);*/

        tts = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
            @Override
            public void onInit(int i) {
                if (i == TextToSpeech.SUCCESS) {
                    tts.setLanguage(Locale.ENGLISH);
                }
                tts.speak("Hi you successfully ran me.", TextToSpeech.QUEUE_FLUSH, null, null);
                // QUEUE_ADD so the second phrase does not cut off the first one.
                tts.speak("Seems good to meet you.", TextToSpeech.QUEUE_ADD, null, null);
            }
        });

        //currentDate = new SimpleDateFormat("dd-MM-yyyy", Locale.getDefault()).format(new Date());
        //currentTime = new SimpleDateFormat("HH:mm:ss", Locale.getDefault()).format(new Date());
        //textToSpeech.speak("Hi! I am your personal assistant. Today date is something something ", TextToSpeech.QUEUE_FLUSH, null, null);
        //Speak("Today's weather forecast for the current location is " + obj.toString());

        intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

        speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
        speechRecognizer.setRecognitionListener(new RecognitionListener() {
            @Override
            public void onReadyForSpeech(Bundle bundle) {
            }

            @Override
            public void onBeginningOfSpeech() {
            }

            @Override
            public void onRmsChanged(float v) {
            }

            @Override
            public void onBufferReceived(byte[] bytes) {
            }

            @Override
            public void onEndOfSpeech() {
            }

            @Override
            public void onError(int i) {
            }

            @Override
            public void onResults(Bundle bundle) {
                ArrayList<String> matches = bundle.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                if (matches != null) {
                    ET_ShowRecognized.setText(matches.get(0));
                    process();
                }
            }

            @Override
            public void onPartialResults(Bundle bundle) {
            }

            @Override
            public void onEvent(int i, Bundle bundle) {
            }
        });

        // Push-to-talk: listen while the button is held down.
        btnRecognize.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                switch (motionEvent.getAction()) {
                    case MotionEvent.ACTION_UP:
                        speechRecognizer.stopListening();
                        break;
                    case MotionEvent.ACTION_DOWN:
                        ET_ShowRecognized.setText(null);
                        ET_ShowRecognized.setText("Listening...");
                        speechRecognizer.startListening(intent);
                        break;
                    default:
                        break;
                }
                return false;
            }
        });
    }

    // Routes the recognized phrase to the matching action.
    public void process() {
        ProcessingText = ET_ShowRecognized.getText().toString().toLowerCase();
        if (ProcessingText.contains("hello")) {
            tts.speak("Hi! I hope all is well.", TextToSpeech.QUEUE_FLUSH, null, null);
        } else if (ProcessingText.contains("hi")) {
            tts.speak("Hello! Nice to meet you.", TextToSpeech.QUEUE_FLUSH, null, null);
        } else if (ProcessingText.contains("your name")) {
            tts.speak("My name is assistant.", TextToSpeech.QUEUE_FLUSH, null, null);
        } else if (ProcessingText.contains("recognise text")) {
            tts.speak("Opening Camera.", TextToSpeech.QUEUE_FLUSH, null, null);
            dispatchTakePictureIntent();
        } else if (ProcessingText.contains("bye")) {
            finish();
            System.exit(0);
        } else if (ProcessingText.contains("current temperature")) {
            sendTemp();
            recieve_data();
        } else {
            tts.speak(ProcessingText, TextToSpeech.QUEUE_FLUSH, null, null);
        }
    }

    private void dispatchTakePictureIntent() {
        CameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        try {
            startActivityForResult(CameraIntent, REQUEST_IMAGE_CAPTURE);
        } catch (ActivityNotFoundException e) {
            // display error state to the user
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
            Bundle extras = data.getExtras();
            imageBitmap = (Bitmap) extras.get("data");
            //imageView.setImageBitmap(imageBitmap);
            detectTextFromImage();
        }
    }

    private void detectTextFromImage() {
        firebaseVisionImage = FirebaseVisionImage.fromBitmap(imageBitmap);
        textDetector = FirebaseVision.getInstance().getVisionTextDetector();
        textDetector.detectInImage(firebaseVisionImage).addOnSuccessListener(new OnSuccessListener<FirebaseVisionText>() {
            @Override
            public void onSuccess(FirebaseVisionText firebaseVisionText) {
                //speakTextFromImage(firebaseVisionText);
                getImgText(firebaseVisionText);
            }
        }).addOnFailureListener(new OnFailureListener() {
            @SuppressLint("SetTextI18n")
            @Override
            public void onFailure(@NonNull Exception e) {
                tts.speak("Something went wrong. Please try again later or try with another image.", TextToSpeech.QUEUE_FLUSH, null, null);
                ET_ShowRecognized.setText("Something went wrong. Please try again later or try with another image.");
            }
        });
    }

    @SuppressLint("SetTextI18n")
    private void getImgText(FirebaseVisionText firebaseVisionText) {
        List<FirebaseVisionText.Block> blockList = firebaseVisionText.getBlocks();
        if (blockList.size() == 0) {
            tts.speak("I think this image contains no text.", TextToSpeech.QUEUE_FLUSH, null, null);
            ET_ShowRecognized.setText("I think this image contains no text.");
        } else {
            for (FirebaseVisionText.Block block : firebaseVisionText.getBlocks()) {
                imgText = block.getText();
                tts.speak("The text in the image is as follows : " + imgText, TextToSpeech.QUEUE_FLUSH, null, null);
                ET_ShowRecognized.setText("The text in the image is as follows : " + imgText);
            }
        }
    }

    public void recieve_data() {
        ServerInput = "Java client is successfully connected with the server ";
        BackgroundTask bt = new BackgroundTask();
        bt.execute(ServerInput);
    }

    public void sendTemp() {
        new TempBackgroundTask().execute();
    }

    // Opens the socket and reads from it off the UI thread;
    // results are handed back to the UI through onProgressUpdate().
    class TempBackgroundTask extends AsyncTask<Void, String, Void> {

        @Override
        protected Void doInBackground(Void... voids) {
            try {
                socket = new Socket("myIP", 12345);
            } catch (UnknownHostException e1) {
                e1.printStackTrace();
            } catch (IOException e1) {
                e1.printStackTrace();
            }

            sent = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        bf = new BufferedReader(new InputStreamReader(socket.getInputStream()));
                        String line;
                        // readLine() returns null when the server closes the connection.
                        while ((line = bf.readLine()) != null) {
                            ServerOutput = line;
                            publishProgress(ServerOutput);
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            });
            sent.start();

            try {
                sent.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return null;
        }

        @Override
        protected void onProgressUpdate(String... text) {
            // Runs on the UI thread, so it is safe to touch the views and TTS here.
            MainActivity.tts.speak(text[0], TextToSpeech.QUEUE_FLUSH, null, null);
            MainActivity.ET_ShowRecognized.setText(text[0]);
        }
    }

    // Sends a message to the server, again off the UI thread.
    class BackgroundTask extends AsyncTask<String, Void, Void> {

        @Override
        protected Void doInBackground(String... voids) {
            try {
                String message = voids[0];
                socket = new Socket("192.168.43.203", 24224);
                writer = new PrintWriter(socket.getOutputStream());
                writer.write(message);
                writer.flush();
                writer.close();
                socket.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            return null;
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
    }
}
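One more note on the approach: AsyncTask is deprecated from API 30 onward, so the same "connection in its own thread" idea can also be written with a plain ExecutorService plus a main-thread Handler. The sketch below is only an illustration of that pattern under my own assumptions, not code from your project: the class name SocketReader, the LineCallback interface, and the host/port arguments are placeholders I invented, and you would wire the callback to your own tts / ET_ShowRecognized code.

import android.os.Handler;
import android.os.Looper;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.Socket;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Hypothetical helper: reads lines from a socket on a background thread.
public class SocketReader {

    // Called on the main thread for every line received from the server.
    public interface LineCallback {
        void onLine(String line);
    }

    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final Handler mainHandler = new Handler(Looper.getMainLooper());

    // Connects and reads entirely on a background thread, then posts each
    // line back to the main thread so the caller can update views or TTS.
    public void start(final String host, final int port, final LineCallback callback) {
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try (Socket socket = new Socket(host, port);
                     BufferedReader reader = new BufferedReader(
                             new InputStreamReader(socket.getInputStream()))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        final String received = line;
                        mainHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                callback.onLine(received);
                            }
                        });
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
}

From MainActivity you would call something like new SocketReader().start("myIP", 12345, callback) and do inside the callback what onProgressUpdate() does above. Whichever variant you use, the manifest also needs the android.permission.INTERNET permission, or the socket will never connect.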