import pyLDAvis
import pyLDAvis.sklearn

# Render pyLDAvis output inline in the notebook.
pyLDAvis.enable_notebook()

# FIX: pass n_jobs=1 (forwarded through to pyLDAvis.prepare).
#
# By default pyLDAvis runs its relevance computation with joblib in
# parallel (n_jobs=-1). joblib's loky resource tracker encodes its
# temp-folder path with .encode('ascii') — see the traceback below at
# resource_tracker.py:_send — so on a Windows account whose user name
# contains non-ASCII characters (the temp dir lives under
# C:\Users\<用户名>\AppData\Local\Temp) it raises UnicodeEncodeError.
# n_jobs=1 selects joblib's SequentialBackend, which never creates the
# memmapping executor or the resource tracker, so no path is encoded.
#
# Alternative (keeps parallelism): point joblib at an ASCII-only folder
# before importing pyLDAvis, e.g.
#     import os; os.environ['JOBLIB_TEMP_FOLDER'] = r'C:\joblib_tmp'
pic = pyLDAvis.sklearn.prepare(lda, tf, tf_vectorizer, n_jobs=1)

# Save the interactive visualization to an HTML file, then display it.
pyLDAvis.save_html(pic, 'lda_pass' + str(n_topics) + '.html')
pyLDAvis.show(pic)
UnicodeEncodeError Traceback (most recent call last)
~\AppData\Local\Temp/ipykernel_8172/1071317399.py in <module>
1 pyLDAvis.enable_notebook()
----> 2 pic = pyLDAvis.sklearn.prepare(lda, tf, tf_vectorizer)
3 pyLDAvis.save_html(pic, 'lda_pass'+str(n_topics)+'.html')
4 pyLDAvis.show(pic)
C:\ProgramData\Anaconda3\lib\site-packages\pyLDAvis\sklearn.py in prepare(lda_model, dtm, vectorizer, **kwargs)
93 """
94 opts = fp.merge(_extract_data(lda_model, dtm, vectorizer), kwargs)
---> 95 return pyLDAvis.prepare(**opts)
C:\ProgramData\Anaconda3\lib\site-packages\pyLDAvis\_prepare.py in prepare(topic_term_dists, doc_topic_dists, doc_lengths, vocab, term_frequency, R, lambda_step, mds, n_jobs, plot_opts, sort_topics, start_index)
437 term_frequency = np.sum(term_topic_freq, axis=0)
438
--> 439 topic_info = _topic_info(topic_term_dists, topic_proportion,
440 term_frequency, term_topic_freq, vocab, lambda_step, R,
441 n_jobs, start_index)
C:\ProgramData\Anaconda3\lib\site-packages\pyLDAvis\_prepare.py in _topic_info(topic_term_dists, topic_proportion, term_frequency, term_topic_freq, vocab, lambda_step, R, n_jobs, start_index)
274 ])
275
--> 276 top_terms = pd.concat(Parallel(n_jobs=n_jobs)
277 (delayed(_find_relevance_chunks)(log_ttd, log_lift, R, ls)
278 for ls in _job_chunks(lambda_seq, n_jobs)))
C:\ProgramData\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
966
967 if not self._managed_backend:
--> 968 n_jobs = self._initialize_backend()
969 else:
970 n_jobs = self._effective_n_jobs()
C:\ProgramData\Anaconda3\lib\site-packages\joblib\parallel.py in _initialize_backend(self)
733 """Build a process or thread pool and return the number of workers"""
734 try:
--> 735 n_jobs = self._backend.configure(n_jobs=self.n_jobs, parallel=self,
736 **self._backend_args)
737 if self.timeout is not None and not self._backend.supports_timeout:
C:\ProgramData\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in configure(self, n_jobs, parallel, prefer, require, idle_worker_timeout, **memmappingexecutor_args)
492 SequentialBackend(nesting_level=self.nesting_level))
493
--> 494 self._workers = get_memmapping_executor(
495 n_jobs, timeout=idle_worker_timeout,
496 env=self._prepare_worker_env(n_jobs=n_jobs),
C:\ProgramData\Anaconda3\lib\site-packages\joblib\executor.py in get_memmapping_executor(n_jobs, **kwargs)
18
19 def get_memmapping_executor(n_jobs, **kwargs):
---> 20 return MemmappingExecutor.get_memmapping_executor(n_jobs, **kwargs)
21
22
C:\ProgramData\Anaconda3\lib\site-packages\joblib\executor.py in get_memmapping_executor(cls, n_jobs, timeout, initializer, initargs, env, temp_folder, context_id, **backend_args)
40 _executor_args = executor_args
41
---> 42 manager = TemporaryResourcesManager(temp_folder)
43
44 # reducers access the temporary folder in which to store temporary
C:\ProgramData\Anaconda3\lib\site-packages\joblib\_memmapping_reducer.py in __init__(self, temp_folder_root, context_id)
529 # exposes exposes too many low-level details.
530 context_id = uuid4().hex
--> 531 self.set_current_context(context_id)
532
533 def set_current_context(self, context_id):
C:\ProgramData\Anaconda3\lib\site-packages\joblib\_memmapping_reducer.py in set_current_context(self, context_id)
533 def set_current_context(self, context_id):
534 self._current_context_id = context_id
--> 535 self.register_new_context(context_id)
536
537 def register_new_context(self, context_id):
C:\ProgramData\Anaconda3\lib\site-packages\joblib\_memmapping_reducer.py in register_new_context(self, context_id)
558 new_folder_name, self._temp_folder_root
559 )
--> 560 self.register_folder_finalizer(new_folder_path, context_id)
561 self._cached_temp_folders[context_id] = new_folder_path
562
C:\ProgramData\Anaconda3\lib\site-packages\joblib\_memmapping_reducer.py in register_folder_finalizer(self, pool_subfolder, context_id)
588 # semaphores and pipes
589 pool_module_name = whichmodule(delete_folder, 'delete_folder')
--> 590 resource_tracker.register(pool_subfolder, "folder")
591
592 def _cleanup():
C:\ProgramData\Anaconda3\lib\site-packages\joblib\externals\loky\backend\resource_tracker.py in register(self, name, rtype)
189 '''Register a named resource, and increment its refcount.'''
190 self.ensure_running()
--> 191 self._send('REGISTER', name, rtype)
192
193 def unregister(self, name, rtype):
C:\ProgramData\Anaconda3\lib\site-packages\joblib\externals\loky\backend\resource_tracker.py in _send(self, cmd, name, rtype)
202
203 def _send(self, cmd, name, rtype):
--> 204 msg = '{0}:{1}:{2}\n'.format(cmd, name, rtype).encode('ascii')
205 if len(name) > 512:
206 # posix guarantees that writes to a pipe of less than PIPE_BUF
UnicodeEncodeError: 'ascii' codec can't encode characters in position 18-20: ordinal not in range(128)
请问这个 UnicodeEncodeError 该怎么解决？从 traceback 看，错误出在 joblib 的 resource_tracker 用 `.encode('ascii')` 编码临时目录路径——Windows 用户名含中文时路径里有非 ASCII 字符，导致编码失败。