Skip to content

55th MAD NSC

National Cover THE PREPARE NOTE

/**
 * Instrumented UI flow for the word-list feature.
 *
 * Tests run in name order (NAME_ASCENDING) because each step depends on state
 * left by the previous one: create words -> verify -> edit -> verify ->
 * mark as learning -> tab filter -> TTS -> flash cards.
 */
@RunWith(AndroidJUnit4::class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
class WordKingUnitTest {
    @get:Rule
    val rule = createAndroidComposeRule<MainActivity>()

    // (english, chinese) pairs seeded in test 2. `val` — never reassigned.
    private val words = listOf(Pair("Hello", "你好"), Pair("Kitty", "貓咪"), Pair("Sofia", "育華"))

    // Values the words are edited to in test 4 and asserted in tests 5/7/9.
    private val wordsAfter = listOf(
        Pair("Hello after", "你好 after"),
        Pair("Kitty after", "貓咪 after"),
        Pair("Sofia after", "育華 after")
    )

    // Crude settle pause between steps (also eases visual inspection).
    private fun sleep() {
        Thread.sleep(1000)
    }

    @Before
    fun setup() {
        rule.waitForIdle()
    }

    @Test
    fun `1 Show word list when open app`() {
        rule.onNodeWithTag("word_list_screen").assertExists()
        sleep()
    }

    @Test
    fun `2 Create word`() {
        words.forEach {
            rule.onNodeWithTag("add_word_btn").assertExists().performClick()
            rule.onNodeWithTag("new_word_screen").assertExists()
            rule.onNodeWithTag("en_input").performTextInput(it.first)
            rule.onNodeWithTag("ch_input").performTextInput(it.second)
            rule.onNodeWithTag("add_word_btn").performClick()
            rule.onNodeWithTag("word_list_screen").assertExists()
        }
        sleep()
    }

    @Test
    fun `3 Check words is there`() {
        val wordsNode = rule.onAllNodesWithTag("word", useUnmergedTree = true)
        wordsNode.fetchSemanticsNodes().forEachIndexed { index, _ ->
            wordsNode[index].onChildren().filterToOne(hasTestTag("en_text"))
                .assertTextEquals(words[index].first)
            wordsNode[index].onChildren().filterToOne(hasTestTag("ch_text"))
                .assertTextEquals(words[index].second)
        }
        sleep()
    }

    @Test
    fun `4 Edit word`() {
        val wordCard = rule.onAllNodesWithTag("word", useUnmergedTree = true)
        wordCard.fetchSemanticsNodes().forEachIndexed { index, _ ->
            // Renamed from `it`, which shadowed the implicit lambda parameter name.
            val expected = wordsAfter[index]
            wordCard[index].performClick()
            rule.onNodeWithTag("edit_word_screen").assertExists()
            rule.onNodeWithTag("en_input").performTextClearance()
            rule.onNodeWithTag("ch_input").performTextClearance()
            rule.onNodeWithTag("en_input").performTextInput(expected.first)
            rule.onNodeWithTag("ch_input").performTextInput(expected.second)
            rule.onNodeWithTag("edit_word_btn").performClick()
            rule.onNodeWithTag("word_list_screen").assertExists()
        }
        sleep()
    }

    @Test
    fun `5 Check Edit`() {
        val wordsNode = rule.onAllNodesWithTag("word", useUnmergedTree = true)
        wordsNode.fetchSemanticsNodes().forEachIndexed { index, _ ->
            wordsNode[index].onChildren().filterToOne(hasTestTag("en_text"))
                .assertTextEquals(wordsAfter[index].first)
            wordsNode[index].onChildren().filterToOne(hasTestTag("ch_text"))
                .assertTextEquals(wordsAfter[index].second)
        }
        sleep()
    }

    @Test
    fun `6 Check isLearning`() {
        // Toggle the "learning" state on every word card.
        val wordCard = rule.onAllNodesWithTag("word", useUnmergedTree = true)
        wordCard.fetchSemanticsNodes().forEachIndexed { index, _ ->
            wordCard[index].onChildren().filterToOne(hasTestTag("is_learning_btn")).performClick()
        }
        sleep()
    }

    @Test
    fun `7 Check isLearning tab contain isLearning word`() {
        rule.onNodeWithTag("學習中").assertExists().performClick()
        val wordsNode = rule.onAllNodesWithTag("word", useUnmergedTree = true)
        wordsNode.fetchSemanticsNodes().forEachIndexed { index, _ ->
            wordsNode[index].onChildren().filterToOne(hasTestTag("en_text"))
                .assertTextEquals(wordsAfter[index].first)
            wordsNode[index].onChildren().filterToOne(hasTestTag("ch_text"))
                .assertTextEquals(wordsAfter[index].second)
        }
        sleep()
    }

    @Test
    fun `8 Play tts`() {
        // Tap the speaker icon on each card; the sleep gives TTS time to play.
        val wordCard = rule.onAllNodesWithTag("word", useUnmergedTree = true)
        wordCard.fetchSemanticsNodes().forEachIndexed { index, _ ->
            wordCard[index].onChildren().filterToOne(hasTestTag("speak_word")).performClick()
            sleep()
            rule.waitForIdle()
        }
        sleep()
    }

    @Test
    fun `9 Card testing`() {
        // Flash-card pager: front shows the english text, tapping flips to
        // chinese, swiping left advances to the next card.
        rule.onNodeWithTag("單字卡").assertExists().performClick()
        rule.waitForIdle()
        val card = rule.onAllNodesWithTag("pager_card", useUnmergedTree = true)
        card.fetchSemanticsNodes().forEachIndexed { index, _ ->
            card[index].onChildren().filterToOne(hasTestTag("card_text"))
                .assertTextEquals(wordsAfter[index].first)
            card[index].performClick()
            rule.waitForIdle()
            card[index].onChildren().filterToOne(hasTestTag("card_text"))
                .assertTextEquals(wordsAfter[index].second)
            rule.onNodeWithTag("horizontal_pager").performTouchInput {
                swipeLeft()
            }
        }
        sleep()
    }
}
// Build the home-screen widget layout and wire a click on R.id.open_note
// to launch MainActivity with an "open_note" action extra.
val views = RemoteViews(context.packageName, R.layout.testing_widget)
val openNote = Intent(context, MainActivity::class.java).apply {
putExtra("action", "open_note")
}
views.setOnClickPendingIntent(
R.id.open_note,
PendingIntent.getActivity(
context,
0,
openNote,
// FLAG_IMMUTABLE is mandatory on Android 12+; FLAG_UPDATE_CURRENT reuses
// any existing PendingIntent while refreshing its extras.
PendingIntent.FLAG_UPDATE_CURRENT or PendingIntent.FLAG_IMMUTABLE
)
)
// Eraser: delete a stroke only when EVERY point of it lies within 40px of the
// current touch position.
// NOTE(review): with `all`, a long stroke is never erased by a single touch —
// if partial overlap should erase, `any` may be intended; confirm with the UX.
paths.removeAll { stroke ->
stroke.offsets.all { point ->
(point - event.changes[0].position).getDistance() < 40f
}
}
/**
 * Free-hand drawing surface. Finished strokes are stored as [Path]s; the
 * stroke currently being drawn is kept as raw points and rendered live.
 *
 * Fix: previously an empty [Path] was appended on EVERY pointer event with no
 * pressed pointers (including repeated lift/hover events), accumulating empty
 * paths in the stroke list. The commit is now guarded on a non-empty stroke.
 */
@Composable
fun HandWritingArea() {
    // Completed strokes.
    val strokes = remember { mutableStateListOf<Path>() }
    // Points of the stroke currently under the pointer.
    val currentPoints = remember { mutableStateListOf<Offset>() }

    // Builds a Path running through `points` in order (shared by commit + live preview).
    fun pathOf(points: List<Offset>): Path = Path().apply {
        points.forEachIndexed { index, offset ->
            if (index == 0) moveTo(offset.x, offset.y)
            lineTo(offset.x, offset.y)
        }
    }

    Column(modifier = Modifier.fillMaxSize()) {
        Canvas(modifier = Modifier
            .fillMaxSize()
            .pointerInput(Unit) {
                awaitPointerEventScope {
                    while (true) {
                        val event = awaitPointerEvent()
                        val position = event.changes.first().position
                        val liftUp = event.changes.all { !it.pressed }
                        when {
                            liftUp -> {
                                // Commit the in-progress stroke exactly once.
                                if (currentPoints.isNotEmpty()) {
                                    strokes.add(pathOf(currentPoints))
                                    currentPoints.clear()
                                }
                            }
                            event.changes.first().pressed -> {
                                currentPoints.add(position)
                            }
                        }
                    }
                }
            }) {
            strokes.forEach {
                drawPath(it, color = Color.Black, style = Stroke(width = 20f))
            }
            // Live preview of the stroke being drawn.
            if (currentPoints.isNotEmpty())
                drawPath(
                    path = pathOf(currentPoints),
                    color = Color.Black, style = Stroke(width = 20f)
                )
        }
    }
}
/**
 * Foreground service that records the screen via MediaProjection and captures
 * device audio playback, encoding both (H.264 + AAC-LC) into an MP4 under
 * [getFilesDir]. The caller must pass the MediaProjection permission result
 * ("resultCode" / "data" extras) from createScreenCaptureIntent().
 */
class RecordingService : Service() {

    private var mediaProjection: MediaProjection? = null
    private var virtualDisplay: VirtualDisplay? = null
    private var videoEncoder: MediaCodec? = null
    private var audioEncoder: MediaCodec? = null
    private var audioRecord: AudioRecord? = null
    private var muxer: MediaMuxer? = null
    private var recordingJob: Job? = null
    private var audioTrackIndex = -1
    private var videoTrackIndex = -1
    private var muxerStarted = false

    override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
        val notification =
            NotificationCompat.Builder(this, recording_channel).setContentTitle("Screen Recording")
                .setContentText("Recording in progress...").setSmallIcon(R.drawable.recording)
                .build()
        // MEDIA_PROJECTION foreground-service type is mandatory for projection capture.
        startForeground(1, notification, ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION)
        val resultCode =
            intent?.getIntExtra("resultCode", Activity.RESULT_CANCELED) ?: Activity.RESULT_CANCELED
        val data = intent?.getParcelableExtra<Intent>("data")
        if (resultCode == Activity.RESULT_OK && data != null) {
            val projectionManager =
                getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
            // `data` is smart-cast to non-null here; the previous `data!!` was redundant.
            mediaProjection = projectionManager.getMediaProjection(resultCode, data)
            mediaProjection?.registerCallback(object : MediaProjection.Callback() {
                override fun onStop() {
                    super.onStop()
                    stopRecording()
                }
            }, null)
            startRecording()
        } else {
            stopSelf()
        }
        return START_NOT_STICKY
    }

    /** Configures encoders, muxer, capture, and launches the drain loop. */
    private fun startRecording() {
        val id = UUID.randomUUID().toString()
        val screen = resources.displayMetrics
        val width = screen.widthPixels
        val height = screen.heightPixels
        val dpi = screen.densityDpi
        val outputFile = File(filesDir, "$id.mp4")
        // --- video encoder (H.264, surface input) ---
        val videoFormat = MediaFormat.createVideoFormat("video/avc", width, height).apply {
            setInteger(
                MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
            )
            setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5)
            setInteger(MediaFormat.KEY_FRAME_RATE, 30)
            setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
        }
        videoEncoder = MediaCodec.createEncoderByType("video/avc").apply {
            configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        }
        val inputSurface = videoEncoder!!.createInputSurface()
        videoEncoder!!.start()
        // --- audio encoder (AAC-LC, stereo, 44.1 kHz) ---
        val audioFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, 2).apply {
            setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
            setInteger(MediaFormat.KEY_BIT_RATE, 128000)
            setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 1024 * 1024)
        }
        audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm").apply {
            configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            start()
        }
        // --- capture of device audio playback (requires RECORD_AUDIO) ---
        val config = AudioPlaybackCaptureConfiguration.Builder(mediaProjection!!)
            .addMatchingUsage(AudioAttributes.USAGE_MEDIA).build()
        val audioRecordFormat =
            AudioFormat.Builder().setEncoding(AudioFormat.ENCODING_PCM_16BIT).setSampleRate(44100)
                .setChannelMask(AudioFormat.CHANNEL_IN_STEREO).build()
        val audioRecordMinBuffer = AudioRecord.getMinBufferSize(
            44100,
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT
        )
        if (ContextCompat.checkSelfPermission(
                this,
                Manifest.permission.RECORD_AUDIO
            ) == PackageManager.PERMISSION_GRANTED
        ) {
            audioRecord =
                AudioRecord.Builder().setAudioPlaybackCaptureConfig(config)
                    .setAudioFormat(audioRecordFormat).setBufferSizeInBytes(audioRecordMinBuffer)
                    .build()
        } else {
            // Bug fix: stopSelf() alone did not stop execution — the method kept
            // going with a null audioRecord and crashed at audioRecord!!.read below.
            stopSelf()
            return
        }
        muxer = MediaMuxer(outputFile.absolutePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
        virtualDisplay = mediaProjection?.createVirtualDisplay(
            "ScreenCapture",
            width,
            height,
            dpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            inputSurface, null, null
        )
        audioRecord?.startRecording()
        recordingJob = CoroutineScope(Dispatchers.IO).launch {
            val videoInfo = MediaCodec.BufferInfo()
            val audioInfo = MediaCodec.BufferInfo()
            val audioBuffer = ByteArray(audioRecordMinBuffer)
            while (isActive) {
                // Drain one encoded video buffer (10 ms timeout).
                val currentVideoIndex = videoEncoder!!.dequeueOutputBuffer(videoInfo, 10000)
                if (currentVideoIndex >= 0) {
                    val videoData = videoEncoder!!.getOutputBuffer(currentVideoIndex)!!
                    // Skip codec-config (SPS/PPS) buffers; they must not be muxed as samples.
                    if ((videoInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                        if (!muxerStarted) {
                            // NOTE(review): outputFormat throws if an encoder has not yet
                            // emitted INFO_OUTPUT_FORMAT_CHANGED — confirm ordering holds.
                            videoTrackIndex = muxer!!.addTrack(videoEncoder!!.outputFormat)
                            audioTrackIndex = muxer!!.addTrack(audioEncoder!!.outputFormat)
                            muxer!!.start()
                            muxerStarted = true
                        }
                        muxer!!.writeSampleData(videoTrackIndex, videoData, videoInfo)
                    }
                    videoEncoder!!.releaseOutputBuffer(currentVideoIndex, false)
                }
                // Feed captured PCM into the AAC encoder.
                val read = audioRecord!!.read(audioBuffer, 0, audioBuffer.size)
                if (read > 0) {
                    val inputIndex = audioEncoder!!.dequeueInputBuffer(10000)
                    if (inputIndex >= 0) {
                        val inputBuffer = audioEncoder!!.getInputBuffer(inputIndex)!!
                        inputBuffer.clear()
                        val length = minOf(read, inputBuffer.capacity())
                        inputBuffer.put(audioBuffer, 0, length)
                        audioEncoder!!.queueInputBuffer(
                            inputIndex,
                            0,
                            length,
                            System.nanoTime() / 1000,
                            0
                        )
                    }
                }
                // Drain all pending encoded audio.
                var outAudioIndex = audioEncoder!!.dequeueOutputBuffer(audioInfo, 0)
                while (outAudioIndex >= 0) {
                    val outBuffer = audioEncoder!!.getOutputBuffer(outAudioIndex)!!
                    // Bug fix: this check previously used CONFIGURE_FLAG_ENCODE (a
                    // configure() flag, value 1) instead of BUFFER_FLAG_CODEC_CONFIG
                    // (value 2), so AAC codec-config buffers could be written as samples.
                    if ((audioInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && muxerStarted) {
                        muxer!!.writeSampleData(audioTrackIndex, outBuffer, audioInfo)
                    }
                    audioEncoder!!.releaseOutputBuffer(outAudioIndex, false)
                    outAudioIndex = audioEncoder!!.dequeueOutputBuffer(audioInfo, 0)
                }
            }
        }
    }

    /** Best-effort teardown of every recording resource; safe to call repeatedly. */
    private fun stopRecording() {
        try {
            recordingJob?.cancel()
            audioRecord?.stop()
            audioRecord?.release()
            audioEncoder?.stop()
            audioEncoder?.release()
            videoEncoder?.stop()
            videoEncoder?.release()
            virtualDisplay?.release()
            mediaProjection?.stop()
            muxer?.stop()
            muxer?.release()
        } catch (e: Exception) {
            // Include the exception so shutdown failures are diagnosable.
            Log.e("ScreenRecording", "error stopping", e)
        }
    }

    override fun onDestroy() {
        super.onDestroy()
        stopRecording()
    }

    override fun onBind(intent: Intent?): IBinder? {
        return null
    }
}
// Video playback on a SurfaceView; the MediaPlayer lifecycle is tied to the
// surface callbacks (created -> prepareAsync, destroyed -> release).
var mediaPlayer by remember { mutableStateOf<MediaPlayer?>(null) }
var surfaceHolder by remember { mutableStateOf<SurfaceHolder?>(null) }
AndroidView(
    factory = {
        SurfaceView(context).apply {
            // Bug fix: a stray "s " before `holder.addCallback` made this a syntax error.
            holder.addCallback(object : SurfaceHolder.Callback {
                override fun surfaceCreated(holder: SurfaceHolder) {
                    try {
                        mediaPlayer = MediaPlayer().apply {
                            // <VIDEO-URI> / <HEADER> are note placeholders to fill in.
                            setDataSource(
                                context,
                                <VIDEO-URI>,
                                mapOf("<HEADER>" to "<HEADER-CONTENT>")
                            )
                            setDisplay(holder)
                            setOnPreparedListener { mp ->
                                // Capture the aspect ratio before starting playback.
                                val videoWidth = mp.videoWidth.toFloat()
                                val videoHeight = mp.videoHeight.toFloat()
                                if (videoWidth > 0 && videoHeight > 0) {
                                    aspectRatio = videoWidth / videoHeight
                                }
                                mp.start()
                                playerState = playerState.copy(isPlaying = true)
                            }
                            setOnErrorListener { _, what, extra ->
                                Log.e("player","error")
                                false
                            }
                            setOnCompletionListener {
                                playerState = playerState.copy(isPlaying = false)
                            }
                            prepareAsync()
                        }
                        surfaceHolder = holder
                    } catch (e: Exception) {
                        Log.e("player", "error", e)
                    }
                }
                override fun surfaceChanged(
                    holder: SurfaceHolder,
                    format: Int,
                    width: Int,
                    height: Int
                ) {
                }
                override fun surfaceDestroyed(holder: SurfaceHolder) {
                    try {
                        mediaPlayer?.release()
                        mediaPlayer = null
                        surfaceHolder = null
                    } catch (e: Exception) {
                        Log.e("player", "error", e)
                    }
                }
            })
        }
    },
    modifier = Modifier
        .fillMaxWidth()
)

Display network image (Via Native HTTP Connection)

Section titled “Display network image (Via Native HTTP Connection)”
// Decoded screenshot; null until the fetch completes (or when it fails).
var bitmap by remember { mutableStateOf<Bitmap?>(null) }
// Re-fetch whenever `screenshot` changes; network + decode run on the IO dispatcher.
LaunchedEffect(screenshot) {
withContext(Dispatchers.IO) {
bitmap = try {
val url = URL("$host/records/screenshots/$screenshot")
url.openConnection().run {
setRequestProperty("Authorization", "Bearer ${nav.accessToken}")
connect()
// `use` closes the stream after decoding.
inputStream.use { BitmapFactory.decodeStream(it) }
}
} catch (e: Exception) {
// Best-effort: any failure leaves bitmap null (no image shown).
e.printStackTrace()
null
}
}
}
/**
 * Downloads `url` into `file` with a bearer-token Authorization header.
 *
 * Fixes: the OkHttp Response was never closed (connection leak, especially on
 * non-2xx responses), and the success message was logged even when the request
 * failed. Best-effort: any exception is printed and swallowed, as before.
 */
fun downloadFile(url: String, file: File, token: String) {
    val client = OkHttpClient()
    val req = Request.Builder().url(url).addHeader("Authorization", "Bearer $token").get().build()
    try {
        // `use` closes the response (and its body) on every path.
        client.newCall(req).execute().use { response ->
            if (response.isSuccessful) {
                response.body?.byteStream().use { input ->
                    FileOutputStream(file).use { output ->
                        input?.copyTo(output)
                    }
                }
                Log.i("Download", "successfully ${file.absolutePath}")
            } else {
                Log.i("Download", "failed with status ${response.code}")
            }
        }
    } catch (e: Exception) {
        e.printStackTrace()
    }
}
  • Set up the AppLink in AndroidManifest.xml — below is the intent-filter setup for
studyflow://oauth?access_token=<JWT_TOKEN>&token_type=Bearer&username=<USERNAME>
<intent-filter>
<action android:name="android.intent.action.VIEW" />
<category android:name="android.intent.category.DEFAULT" />
<category android:name="android.intent.category.BROWSABLE" />
<data
android:host="oauth"
android:scheme="studyflow"
/>
</intent-filter>
  • via WebView
// OAuth inside an in-app WebView: intercept the studyflow://oauth redirect
// and read the token data from its query parameters.
AndroidView(modifier = Modifier.fillMaxSize(),
    factory = {
        WebView(it).apply {
            settings.javaScriptEnabled = true
            loadUrl("<OAUTH PAGE HERE>")
            webViewClient = object : WebViewClient() {
                override fun shouldOverrideUrlLoading(view: WebView?, url: String?): Boolean {
                    // Bug fix: `url?.startsWith(...)!!` threw an NPE on a null url;
                    // `== true` treats null as "not our redirect".
                    if (url?.startsWith("studyflow://oauth") == true) {
                        val uri = Uri.parse(url)
                        val accessToken = uri?.getQueryParameter("access_token")
                        val username = uri?.getQueryParameter("username")
                        val tokenType = uri?.getQueryParameter("token_type")
                    }
                    return false
                }
            }
        }
    }
)
  • via device browser

Above we set up an app link, so when a link like studyflow://oauth is opened from the device browser, Android redirects the user to our app and delivers the redirect URL as the intent data. To authenticate the user we therefore only need to check whether intent.data is non-null and, if so, read the auth data from that URI — no WebView required.

  • Open oauth page in device browser
val urlHandler = LocalUriHandler.current
urlHandler.openUri("<OAUTH-PAGE-URL>")
  • Get oauth data from intent.data
// App-link entry: when the activity was launched via the studyflow://oauth
// deep link, the token data arrives as query parameters on intent.data.
LaunchedEffect(Unit) {
if (intent.data != null) {
val uri = intent.data
val accessToken = uri?.getQueryParameter("access_token")
val username = uri?.getQueryParameter("username")
val tokenType = uri?.getQueryParameter("token_type")
}
}
/// Fetches the sound list from `$hostname/sounds`.
///
/// Returns an empty list on a non-200 status or on any error; the
/// [HttpClient] is always closed in the `finally` block.
Future<List<Sound>> getApi() async {
  final client = HttpClient();
  try {
    final request = await client.getUrl(Uri.parse("$hostname/sounds"))
      ..headers.add("X-API-Key", "kitty-secret-key");
    final response = await request.close();
    // Guard clause: anything but 200 yields an empty list.
    if (response.statusCode != HttpStatus.ok) {
      return [];
    }
    final body = await response.transform(utf8.decoder).join();
    final decoded = jsonDecode(body) as List;
    final List<Sound> sounds =
        decoded.map((item) => Sound.fromJson(item)).toList();
    print(sounds);
    return sounds;
  } catch (e) {
    print("error $e");
    return [];
  } finally {
    client.close();
  }
}
/// POSTs a test alarm payload to `$hostname/alarms`.
///
/// Fixes: `request.close()` was not awaited, so the function could complete
/// before the request was actually sent; and the [HttpClient] was never
/// closed, leaking the connection.
Future<void> postApi() async {
  final payload = jsonEncode({
    "soundId": 1,
    "soundName": "test from flutter",
    "alarmTime": "2025-07-16T17:33:59.116Z"
  });
  final client = HttpClient();
  try {
    final request = await client.postUrl(Uri.parse("$hostname/alarms"))
      ..headers.add("X-API-Key", "kitty-secret-key")
      // NOTE(review): no Content-Type header is set — confirm the server
      // accepts the default for a JSON body.
      ..add(utf8.encode(payload));
    // Await so the body is flushed and the server's response is received.
    await request.close();
  } finally {
    client.close();
  }
}